add ppocr
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +7 -0
- .gitignore +2 -1
- PaddleOCR_ali1k_det_rec_300epoch_standalone/.gitattributes +57 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/.gitignore +47 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/README.md +289 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/__init__.py +0 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/arabic.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/chinese_cht.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/cyrillic.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/french.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/german.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/hindi.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/japan.ttc +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/kannada.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/korean.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/latin.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/marathi.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/nepali.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/persian.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/simfang.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/spanish.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/tamil.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/telugu.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/urdu.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/uyghur.ttf +3 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/freeze.txt +59 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/__init__.py +33 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/any_pb2.py +26 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/api_pb2.py +32 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/compiler/__init__.py +0 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/compiler/plugin_pb2.py +35 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor.py +1224 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_database.py +177 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_pb2.py +0 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_pool.py +1295 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/duration_pb2.py +26 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/empty_pb2.py +26 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/field_mask_pb2.py +26 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/__init__.py +0 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/api_implementation.py +112 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/builder.py +130 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/containers.py +710 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/decoder.py +1029 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/encoder.py +829 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/enum_type_wrapper.py +124 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/extension_dict.py +213 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/message_listener.py +78 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/python_message.py +1539 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/type_checkers.py +435 -0
- PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/well_known_types.py +878 -0
.gitattributes
CHANGED
|
@@ -2,6 +2,13 @@
|
|
| 2 |
*.pdf filter=lfs diff=lfs merge=lfs -text
|
| 3 |
pdfs/**/*.json filter=lfs diff=lfs merge=lfs -text
|
| 4 |
pdfs/**/*.txt filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 6 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 7 |
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 2 |
*.pdf filter=lfs diff=lfs merge=lfs -text
|
| 3 |
pdfs/**/*.json filter=lfs diff=lfs merge=lfs -text
|
| 4 |
pdfs/**/*.txt filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
# ppocr
|
| 6 |
+
*.pdopt filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.pdparams filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.pdiparams filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.pdmodel filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.states filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.info filter=lfs diff=lfs merge=lfs -text
|
| 12 |
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 13 |
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 14 |
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
.gitignore
CHANGED
|
@@ -31,4 +31,5 @@ daciling_pre.json
|
|
| 31 |
daciling_zh.txt
|
| 32 |
*.aac
|
| 33 |
bt.jpeg
|
| 34 |
-
*log.txt
|
|
|
|
|
|
| 31 |
daciling_zh.txt
|
| 32 |
*.aac
|
| 33 |
bt.jpeg
|
| 34 |
+
*log.txt
|
| 35 |
+
paddleocr.egg-info/
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/.gitattributes
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
*.7z filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
*.so filter=lfs diff=lfs merge=lfs -text
|
| 3 |
+
*.so.[0-9] filter=lfs diff=lfs merge=lfs -text
|
| 4 |
+
*.arrow filter=lfs diff=lfs merge=lfs -text
|
| 5 |
+
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 6 |
+
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 7 |
+
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 8 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 9 |
+
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 10 |
+
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 11 |
+
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 12 |
+
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 13 |
+
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 14 |
+
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 15 |
+
*.model filter=lfs diff=lfs merge=lfs -text
|
| 16 |
+
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 17 |
+
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 18 |
+
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 19 |
+
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 20 |
+
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 21 |
+
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 22 |
+
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 23 |
+
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 24 |
+
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 25 |
+
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 26 |
+
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 27 |
+
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 28 |
+
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 29 |
+
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 30 |
+
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 31 |
+
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 32 |
+
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 33 |
+
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 34 |
+
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 35 |
+
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 38 |
+
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 39 |
+
*.mp3 filter=lfs diff=lfs merge=lfs -text
|
| 40 |
+
*.wav filter=lfs diff=lfs merge=lfs -text
|
| 41 |
+
*.jpg filter=lfs diff=lfs merge=lfs -text
|
| 42 |
+
*.png filter=lfs diff=lfs merge=lfs -text
|
| 43 |
+
*.webp filter=lfs diff=lfs merge=lfs -text
|
| 44 |
+
*.mp4 filter=lfs diff=lfs merge=lfs -text
|
| 45 |
+
*.db filter=lfs diff=lfs merge=lfs -text
|
| 46 |
+
# ppocr
|
| 47 |
+
*.pdopt filter=lfs diff=lfs merge=lfs -text
|
| 48 |
+
*.pdparams filter=lfs diff=lfs merge=lfs -text
|
| 49 |
+
*.pdiparams filter=lfs diff=lfs merge=lfs -text
|
| 50 |
+
*.pdmodel filter=lfs diff=lfs merge=lfs -text
|
| 51 |
+
*.states filter=lfs diff=lfs merge=lfs -text
|
| 52 |
+
*.info filter=lfs diff=lfs merge=lfs -text
|
| 53 |
+
# font
|
| 54 |
+
*.ttf filter=lfs diff=lfs merge=lfs -text
|
| 55 |
+
*.ttc filter=lfs diff=lfs merge=lfs -text
|
| 56 |
+
|
| 57 |
+
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/.gitignore
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# node_modules
|
| 2 |
+
# __pycache__
|
| 3 |
+
# flagged
|
| 4 |
+
# exported
|
| 5 |
+
# package-lock.json
|
| 6 |
+
# package.json
|
| 7 |
+
# output.mp4
|
| 8 |
+
# #config.json
|
| 9 |
+
|
| 10 |
+
# ######### PaddleOCR_ali1k_det_rec_300epoch begin ########################
|
| 11 |
+
|
| 12 |
+
# # Byte-compiled / optimized / DLL files
|
| 13 |
+
# __pycache__/
|
| 14 |
+
# .ipynb_checkpoints/
|
| 15 |
+
# *.py[cod]
|
| 16 |
+
# *$py.class
|
| 17 |
+
|
| 18 |
+
# # C extensions
|
| 19 |
+
# # *.so
|
| 20 |
+
|
| 21 |
+
# # inference/
|
| 22 |
+
# inference_results/
|
| 23 |
+
# # output/
|
| 24 |
+
# # train_data/
|
| 25 |
+
# log/
|
| 26 |
+
# *.DS_Store
|
| 27 |
+
# *.vs
|
| 28 |
+
# *.user
|
| 29 |
+
# *~
|
| 30 |
+
# *.vscode
|
| 31 |
+
# *.idea
|
| 32 |
+
|
| 33 |
+
# # *.log
|
| 34 |
+
# .clang-format
|
| 35 |
+
# .clang_format.hook
|
| 36 |
+
|
| 37 |
+
# build/
|
| 38 |
+
# dist/
|
| 39 |
+
# paddleocr.egg-info/
|
| 40 |
+
# /deploy/android_demo/app/OpenCV/
|
| 41 |
+
# /deploy/android_demo/app/PaddleLite/
|
| 42 |
+
# /deploy/android_demo/app/.cxx/
|
| 43 |
+
# /deploy/android_demo/app/cache/
|
| 44 |
+
# test_tipc/web/models/
|
| 45 |
+
# test_tipc/web/node_modules/
|
| 46 |
+
|
| 47 |
+
# ######### PaddleOCR_ali1k_det_rec_300epoch end ########################
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/README.md
ADDED
|
@@ -0,0 +1,289 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
English | [简体中文](README_ch.md)
|
| 2 |
+
|
| 3 |
+
see 深入理解神经网络:从逻辑回归到CNN.md -> PaddleOCR_ali1k_det_rec_300epoch
|
| 4 |
+
|
| 5 |
+
Illegal instruction 错误
|
| 6 |
+
|
| 7 |
+
It can be fixed by PR , We skip SelfAttentionFusePass on non-avx512 platform
|
| 8 |
+
|
| 9 |
+
pip uninstall paddlepaddle
|
| 10 |
+
|
| 11 |
+
pip install paddlepaddle==2.4.2
|
| 12 |
+
|
| 13 |
+
pip install -r requirements.txt
|
| 14 |
+
|
| 15 |
+
python tools/infer/predict_rec.py --image_dir=train_data/rec/test/1_crop_5.jpg --rec_model_dir=output/rec_model/Student --rec_char_dict_path=train_data/keys.txt
|
| 16 |
+
|
| 17 |
+
PaddleOCR_ali1k_det_rec_300epoch.7z
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
```
|
| 21 |
+
source activate PP && \
|
| 22 |
+
pip uninstall opencv-python && \
|
| 23 |
+
pip install opencv-python==4.6.0.66 && \
|
| 24 |
+
pip install pyyaml
|
| 25 |
+
|
| 26 |
+
cp autodl-tmp/train_data.zip . && \
|
| 27 |
+
unzip train_data.zip -d PaddleOCR
|
| 28 |
+
|
| 29 |
+
# 空间不够用软链接
|
| 30 |
+
ln -s /root/autodl-tmp/train_data /root/PaddleOCR/train_data
|
| 31 |
+
|
| 32 |
+
cd PPOCRLabel && \
|
| 33 |
+
python gen_ocr_train_val_test.py
|
| 34 |
+
|
| 35 |
+
# 训练
|
| 36 |
+
source activate PP && \
|
| 37 |
+
python tools/train.py -c configs/rec/PP-OCRv3/ch_PP-OCRv3_rec_distillation.yml
|
| 38 |
+
|
| 39 |
+
# 继续上一次训练(epoch 接着上一次的断点开始)
|
| 40 |
+
source activate PP && \
|
| 41 |
+
python tools/train.py -c configs/rec/PP-OCRv3/ch_PP-OCRv3_rec_distillation.yml -o Global.checkpoints=output/rec_ppocr_v3_distillation/best_accuracy
|
| 42 |
+
|
| 43 |
+
# 微调 (epoch 从一开始)
|
| 44 |
+
source activate PP && \
|
| 45 |
+
python tools/train.py -c configs/rec/PP-OCRv3/ch_PP-OCRv3_rec_distillation.yml -o Global.pretrained_model=output/rec_ppocr_v3_distillation/best_accuracy
|
| 46 |
+
|
| 47 |
+
# 导出模型
|
| 48 |
+
python tools/export_model.py -c configs/rec/PP-OCRv3/ch_PP-OCRv3_rec_distillation.yml -o Global.checkpoints=output/rec_ppocr_v3_distillation/best_accuracy Global.save_inference_dir=output/model
|
| 49 |
+
|
| 50 |
+
# 推断
|
| 51 |
+
python tools/infer/predict_rec.py --image_dir=train_data/rec/test/1_crop_0.jpg --rec_model_dir=output/model/Student --rec_char_dict_path=train_data/keys.txt
|
| 52 |
+
# train_data/keys.txt 是自已生成的自定义词典,训练的时侯也要指定这个词典
|
| 53 |
+
```
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
<p align="center">
|
| 57 |
+
<img src="./doc/PaddleOCR_log.png" align="middle" width = "600"/>
|
| 58 |
+
<p align="center">
|
| 59 |
+
<p align="left">
|
| 60 |
+
<a href="./LICENSE"><img src="https://img.shields.io/badge/license-Apache%202-dfd.svg"></a>
|
| 61 |
+
<a href="https://github.com/PaddlePaddle/PaddleOCR/releases"><img src="https://img.shields.io/github/v/release/PaddlePaddle/PaddleOCR?color=ffa"></a>
|
| 62 |
+
<a href=""><img src="https://img.shields.io/badge/python-3.7+-aff.svg"></a>
|
| 63 |
+
<a href=""><img src="https://img.shields.io/badge/os-linux%2C%20win%2C%20mac-pink.svg"></a>
|
| 64 |
+
<a href=""><img src="https://img.shields.io/pypi/format/PaddleOCR?color=c77"></a>
|
| 65 |
+
<a href="https://pypi.org/project/PaddleOCR/"><img src="https://img.shields.io/pypi/dm/PaddleOCR?color=9cf"></a>
|
| 66 |
+
<a href="https://github.com/PaddlePaddle/PaddleOCR/stargazers"><img src="https://img.shields.io/github/stars/PaddlePaddle/PaddleOCR?color=ccf"></a>
|
| 67 |
+
</p>
|
| 68 |
+
|
| 69 |
+
## Introduction
|
| 70 |
+
|
| 71 |
+
PaddleOCR aims to create multilingual, awesome, leading, and practical OCR tools that help users train better models and apply them into practice.
|
| 72 |
+
|
| 73 |
+
<div align="center">
|
| 74 |
+
<img src="./doc/imgs_results/PP-OCRv3/en/en_4.png" width="800">
|
| 75 |
+
</div>
|
| 76 |
+
|
| 77 |
+
<div align="center">
|
| 78 |
+
<img src="./doc/imgs_results/ch_ppocr_mobile_v2.0/00006737.jpg" width="800">
|
| 79 |
+
</div>
|
| 80 |
+
|
| 81 |
+
## 📣 Recent updates
|
| 82 |
+
- **🔥2022.8.24 Release PaddleOCR [release/2.6](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.6)**
|
| 83 |
+
- Release [PP-Structurev2](./ppstructure/),with functions and performance fully upgraded, adapted to Chinese scenes, and new support for [Layout Recovery](./ppstructure/recovery) and **one line command to convert PDF to Word**;
|
| 84 |
+
- [Layout Analysis](./ppstructure/layout) optimization: model storage reduced by 95%, while speed increased by 11 times, and the average CPU time-cost is only 41ms;
|
| 85 |
+
- [Table Recognition](./ppstructure/table) optimization: 3 optimization strategies are designed, and the model accuracy is improved by 6% under comparable time consumption;
|
| 86 |
+
- [Key Information Extraction](./ppstructure/kie) optimization:a visual-independent model structure is designed, the accuracy of semantic entity recognition is increased by 2.8%, and the accuracy of relation extraction is increased by 9.1%.
|
| 87 |
+
- **🔥2022.8 Release [OCR scene application collection](./applications/README_en.md)**
|
| 88 |
+
- Release **9 vertical models** such as digital tube, LCD screen, license plate, handwriting recognition model, high-precision SVTR model, etc, covering the main OCR vertical applications in general, manufacturing, finance, and transportation industries.
|
| 89 |
+
- **2022.8 Add implementation of [8 cutting-edge algorithms](doc/doc_en/algorithm_overview_en.md)**
|
| 90 |
+
- Text Detection: [FCENet](doc/doc_en/algorithm_det_fcenet_en.md), [DB++](doc/doc_en/algorithm_det_db_en.md)
|
| 91 |
+
- Text Recognition: [ViTSTR](doc/doc_en/algorithm_rec_vitstr_en.md), [ABINet](doc/doc_en/algorithm_rec_abinet_en.md), [VisionLAN](doc/doc_en/algorithm_rec_visionlan_en.md), [SPIN](doc/doc_en/algorithm_rec_spin_en.md), [RobustScanner](doc/doc_en/algorithm_rec_robustscanner_en.md)
|
| 92 |
+
- Table Recognition: [TableMaster](doc/doc_en/algorithm_table_master_en.md)
|
| 93 |
+
- **2022.5.9 Release PaddleOCR [release/2.5](https://github.com/PaddlePaddle/PaddleOCR/tree/release/2.5)**
|
| 94 |
+
- Release [PP-OCRv3](./doc/doc_en/ppocr_introduction_en.md#pp-ocrv3): With comparable speed, the effect of Chinese scene is further improved by 5% compared with PP-OCRv2, the effect of English scene is improved by 11%, and the average recognition accuracy of 80 language multilingual models is improved by more than 5%.
|
| 95 |
+
- Release [PPOCRLabelv2](./PPOCRLabel): Add the annotation function for table recognition task, key information extraction task and irregular text image.
|
| 96 |
+
- Release interactive e-book [*"Dive into OCR"*](./doc/doc_en/ocr_book_en.md), covers the cutting-edge theory and code practice of OCR full stack technology.
|
| 97 |
+
- [more](./doc/doc_en/update_en.md)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
## 🌟 Features
|
| 101 |
+
|
| 102 |
+
PaddleOCR support a variety of cutting-edge algorithms related to OCR, and developed industrial featured models/solution [PP-OCR](./doc/doc_en/ppocr_introduction_en.md) and [PP-Structure](./ppstructure/README.md) on this basis, and get through the whole process of data production, model training, compression, inference and deployment.
|
| 103 |
+
|
| 104 |
+
<div align="center">
|
| 105 |
+
<img src="https://user-images.githubusercontent.com/25809855/186171245-40abc4d7-904f-4949-ade1-250f86ed3a90.png">
|
| 106 |
+
</div>
|
| 107 |
+
|
| 108 |
+
> It is recommended to start with the “quick experience” in the document tutorial
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
## ⚡ Quick Experience
|
| 112 |
+
|
| 113 |
+
- Web online experience for the ultra-lightweight OCR: [Online Experience](https://www.paddlepaddle.org.cn/hub/scene/ocr)
|
| 114 |
+
- Mobile DEMO experience (based on EasyEdge and Paddle-Lite, supports iOS and Android systems): [Sign in to the website to obtain the QR code for installing the App](https://ai.baidu.com/easyedge/app/openSource?from=paddlelite)
|
| 115 |
+
- One line of code quick use: [Quick Start](./doc/doc_en/quickstart_en.md)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
<a name="book"></a>
|
| 119 |
+
## 📚 E-book: *Dive Into OCR*
|
| 120 |
+
- [Dive Into OCR ](./doc/doc_en/ocr_book_en.md)
|
| 121 |
+
|
| 122 |
+
<a name="Community"></a>
|
| 123 |
+
## 👫 Community
|
| 124 |
+
|
| 125 |
+
- For international developers, we regard [PaddleOCR Discussions](https://github.com/PaddlePaddle/PaddleOCR/discussions) as our international community platform. All ideas and questions can be discussed here in English.
|
| 126 |
+
|
| 127 |
+
- For Chinese develops, Scan the QR code below with your Wechat, you can join the official technical discussion group. For richer community content, please refer to [中文README](README_ch.md), looking forward to your participation.
|
| 128 |
+
|
| 129 |
+
<div align="center">
|
| 130 |
+
<img src="https://raw.githubusercontent.com/PaddlePaddle/PaddleOCR/dygraph/doc/joinus.PNG" width = "150" height = "150" />
|
| 131 |
+
</div>
|
| 132 |
+
|
| 133 |
+
<a name="Supported-Chinese-model-list"></a>
|
| 134 |
+
|
| 135 |
+
## 🛠️ PP-OCR Series Model List(Update on September 8th)
|
| 136 |
+
|
| 137 |
+
| Model introduction | Model name | Recommended scene | Detection model | Direction classifier | Recognition model |
|
| 138 |
+
| ------------------------------------------------------------ | ---------------------------- | ----------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
|
| 139 |
+
| Chinese and English ultra-lightweight PP-OCRv3 model(16.2M) | ch_PP-OCRv3_xx | Mobile & Server | [inference model](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_det_distill_train.tar) | [inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_train.tar) | [inference model](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv3/chinese/ch_PP-OCRv3_rec_train.tar) |
|
| 140 |
+
| English ultra-lightweight PP-OCRv3 model(13.4M) | en_PP-OCRv3_xx | Mobile & Server | [inference model](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_det_distill_train.tar) | [inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_train.tar) | [inference model](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv3/english/en_PP-OCRv3_rec_train.tar) |
|
| 141 |
+
| Chinese and English ultra-lightweight PP-OCRv2 model(11.6M) | ch_PP-OCRv2_xx |Mobile & Server|[inference model](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_det_distill_train.tar)| [inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_train.tar) |[inference model](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/PP-OCRv2/chinese/ch_PP-OCRv2_rec_train.tar)|
|
| 142 |
+
| Chinese and English ultra-lightweight PP-OCR model (9.4M) | ch_ppocr_mobile_v2.0_xx | Mobile & server |[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_det_train.tar)|[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_train.tar) |[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_rec_train.tar) |
|
| 143 |
+
| Chinese and English general PP-OCR model (143.4M) | ch_ppocr_server_v2.0_xx | Server |[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_det_train.tar) |[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_mobile_v2.0_cls_train.tar) |[inference model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_infer.tar) / [trained model](https://paddleocr.bj.bcebos.com/dygraph_v2.0/ch/ch_ppocr_server_v2.0_rec_train.tar) |
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
- For more model downloads (including multiple languages), please refer to [PP-OCR series model downloads](./doc/doc_en/models_list_en.md).
|
| 147 |
+
- For a new language request, please refer to [Guideline for new language_requests](#language_requests).
|
| 148 |
+
- For structural document analysis models, please refer to [PP-Structure models](./ppstructure/docs/models_list_en.md).
|
| 149 |
+
|
| 150 |
+
## 📖 Tutorials
|
| 151 |
+
- [Environment Preparation](./doc/doc_en/environment_en.md)
|
| 152 |
+
- [PP-OCR 🔥](./doc/doc_en/ppocr_introduction_en.md)
|
| 153 |
+
- [Quick Start](./doc/doc_en/quickstart_en.md)
|
| 154 |
+
- [Model Zoo](./doc/doc_en/models_en.md)
|
| 155 |
+
- [Model training](./doc/doc_en/training_en.md)
|
| 156 |
+
- [Text Detection](./doc/doc_en/detection_en.md)
|
| 157 |
+
- [Text Recognition](./doc/doc_en/recognition_en.md)
|
| 158 |
+
- [Text Direction Classification](./doc/doc_en/angle_class_en.md)
|
| 159 |
+
- Model Compression
|
| 160 |
+
- [Model Quantization](./deploy/slim/quantization/README_en.md)
|
| 161 |
+
- [Model Pruning](./deploy/slim/prune/README_en.md)
|
| 162 |
+
- [Knowledge Distillation](./doc/doc_en/knowledge_distillation_en.md)
|
| 163 |
+
- [Inference and Deployment](./deploy/README.md)
|
| 164 |
+
- [Python Inference](./doc/doc_en/inference_ppocr_en.md)
|
| 165 |
+
- [C++ Inference](./deploy/cpp_infer/readme.md)
|
| 166 |
+
- [Serving](./deploy/pdserving/README.md)
|
| 167 |
+
- [Mobile](./deploy/lite/readme.md)
|
| 168 |
+
- [Paddle2ONNX](./deploy/paddle2onnx/readme.md)
|
| 169 |
+
- [PaddleCloud](./deploy/paddlecloud/README.md)
|
| 170 |
+
- [Benchmark](./doc/doc_en/benchmark_en.md)
|
| 171 |
+
- [PP-Structure 🔥](./ppstructure/README.md)
|
| 172 |
+
- [Quick Start](./ppstructure/docs/quickstart_en.md)
|
| 173 |
+
- [Model Zoo](./ppstructure/docs/models_list_en.md)
|
| 174 |
+
- [Model training](./doc/doc_en/training_en.md)
|
| 175 |
+
- [Layout Analysis](./ppstructure/layout/README.md)
|
| 176 |
+
- [Table Recognition](./ppstructure/table/README.md)
|
| 177 |
+
- [Key Information Extraction](./ppstructure/kie/README.md)
|
| 178 |
+
- [Inference and Deployment](./deploy/README.md)
|
| 179 |
+
- [Python Inference](./ppstructure/docs/inference_en.md)
|
| 180 |
+
- [C++ Inference](./deploy/cpp_infer/readme.md)
|
| 181 |
+
- [Serving](./deploy/hubserving/readme_en.md)
|
| 182 |
+
- [Academic Algorithms](./doc/doc_en/algorithm_overview_en.md)
|
| 183 |
+
- [Text detection](./doc/doc_en/algorithm_overview_en.md)
|
| 184 |
+
- [Text recognition](./doc/doc_en/algorithm_overview_en.md)
|
| 185 |
+
- [End-to-end OCR](./doc/doc_en/algorithm_overview_en.md)
|
| 186 |
+
- [Table Recognition](./doc/doc_en/algorithm_overview_en.md)
|
| 187 |
+
- [Key Information Extraction](./doc/doc_en/algorithm_overview_en.md)
|
| 188 |
+
- [Add New Algorithms to PaddleOCR](./doc/doc_en/add_new_algorithm_en.md)
|
| 189 |
+
- Data Annotation and Synthesis
|
| 190 |
+
- [Semi-automatic Annotation Tool: PPOCRLabel](./PPOCRLabel/README.md)
|
| 191 |
+
- [Data Synthesis Tool: Style-Text](./StyleText/README.md)
|
| 192 |
+
- [Other Data Annotation Tools](./doc/doc_en/data_annotation_en.md)
|
| 193 |
+
- [Other Data Synthesis Tools](./doc/doc_en/data_synthesis_en.md)
|
| 194 |
+
- Datasets
|
| 195 |
+
- [General OCR Datasets(Chinese/English)](doc/doc_en/dataset/datasets_en.md)
|
| 196 |
+
- [HandWritten_OCR_Datasets(Chinese)](doc/doc_en/dataset/handwritten_datasets_en.md)
|
| 197 |
+
- [Various OCR Datasets(multilingual)](doc/doc_en/dataset/vertical_and_multilingual_datasets_en.md)
|
| 198 |
+
- [Layout Analysis](doc/doc_en/dataset/layout_datasets_en.md)
|
| 199 |
+
- [Table Recognition](doc/doc_en/dataset/table_datasets_en.md)
|
| 200 |
+
- [Key Information Extraction](doc/doc_en/dataset/kie_datasets_en.md)
|
| 201 |
+
- [Code Structure](./doc/doc_en/tree_en.md)
|
| 202 |
+
- [Visualization](#Visualization)
|
| 203 |
+
- [Community](#Community)
|
| 204 |
+
- [New language requests](#language_requests)
|
| 205 |
+
- [FAQ](./doc/doc_en/FAQ_en.md)
|
| 206 |
+
- [References](./doc/doc_en/reference_en.md)
|
| 207 |
+
- [License](#LICENSE)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
<a name="Visualization"></a>
|
| 211 |
+
## 👀 Visualization [more](./doc/doc_en/visualization_en.md)
|
| 212 |
+
|
| 213 |
+
<details open>
|
| 214 |
+
<summary>PP-OCRv3 Chinese model</summary>
|
| 215 |
+
<div align="center">
|
| 216 |
+
<img src="doc/imgs_results/PP-OCRv3/ch/PP-OCRv3-pic001.jpg" width="800">
|
| 217 |
+
<img src="doc/imgs_results/PP-OCRv3/ch/PP-OCRv3-pic002.jpg" width="800">
|
| 218 |
+
<img src="doc/imgs_results/PP-OCRv3/ch/PP-OCRv3-pic003.jpg" width="800">
|
| 219 |
+
</div>
|
| 220 |
+
</details>
|
| 221 |
+
|
| 222 |
+
<details open>
|
| 223 |
+
<summary>PP-OCRv3 English model</summary>
|
| 224 |
+
<div align="center">
|
| 225 |
+
<img src="doc/imgs_results/PP-OCRv3/en/en_1.png" width="800">
|
| 226 |
+
<img src="doc/imgs_results/PP-OCRv3/en/en_2.png" width="800">
|
| 227 |
+
</div>
|
| 228 |
+
</details>
|
| 229 |
+
|
| 230 |
+
<details open>
|
| 231 |
+
<summary>PP-OCRv3 Multilingual model</summary>
|
| 232 |
+
<div align="center">
|
| 233 |
+
<img src="doc/imgs_results/PP-OCRv3/multi_lang/japan_2.jpg" width="800">
|
| 234 |
+
<img src="doc/imgs_results/PP-OCRv3/multi_lang/korean_1.jpg" width="800">
|
| 235 |
+
</div>
|
| 236 |
+
</details>
|
| 237 |
+
|
| 238 |
+
<details open>
|
| 239 |
+
<summary>PP-Structurev2</summary>
|
| 240 |
+
|
| 241 |
+
- layout analysis + table recognition
|
| 242 |
+
<div align="center">
|
| 243 |
+
<img src="./ppstructure/docs/table/ppstructure.GIF" width="800">
|
| 244 |
+
</div>
|
| 245 |
+
|
| 246 |
+
- SER (Semantic entity recognition)
|
| 247 |
+
<div align="center">
|
| 248 |
+
<img src="https://user-images.githubusercontent.com/25809855/186094456-01a1dd11-1433-4437-9ab2-6480ac94ec0a.png" width="600">
|
| 249 |
+
</div>
|
| 250 |
+
|
| 251 |
+
<div align="center">
|
| 252 |
+
<img src="https://user-images.githubusercontent.com/14270174/185310636-6ce02f7c-790d-479f-b163-ea97a5a04808.jpg" width="600">
|
| 253 |
+
</div>
|
| 254 |
+
|
| 255 |
+
<div align="center">
|
| 256 |
+
<img src="https://user-images.githubusercontent.com/14270174/185539517-ccf2372a-f026-4a7c-ad28-c741c770f60a.png" width="600">
|
| 257 |
+
</div>
|
| 258 |
+
|
| 259 |
+
- RE (Relation Extraction)
|
| 260 |
+
<div align="center">
|
| 261 |
+
<img src="https://user-images.githubusercontent.com/25809855/186094813-3a8e16cc-42e5-4982-b9f4-0134dfb5688d.png" width="600">
|
| 262 |
+
</div>
|
| 263 |
+
|
| 264 |
+
<div align="center">
|
| 265 |
+
<img src="https://user-images.githubusercontent.com/14270174/185393805-c67ff571-cf7e-4217-a4b0-8b396c4f22bb.jpg" width="600">
|
| 266 |
+
</div>
|
| 267 |
+
|
| 268 |
+
<div align="center">
|
| 269 |
+
<img src="https://user-images.githubusercontent.com/14270174/185540080-0431e006-9235-4b6d-b63d-0b3c6e1de48f.jpg" width="600">
|
| 270 |
+
</div>
|
| 271 |
+
|
| 272 |
+
</details>
|
| 273 |
+
|
| 274 |
+
<a name="language_requests"></a>
|
| 275 |
+
## 🇺🇳 Guideline for New Language Requests
|
| 276 |
+
|
| 277 |
+
If you want to request a new language support, a PR with 1 following files are needed:
|
| 278 |
+
|
| 279 |
+
1. In folder [ppocr/utils/dict](./ppocr/utils/dict),
|
| 280 |
+
it is necessary to submit the dict text to this path and name it with `{language}_dict.txt` that contains a list of all characters. Please see the format example from other files in that folder.
|
| 281 |
+
|
| 282 |
+
If your language has unique elements, please tell me in advance within any way, such as useful links, wikipedia and so on.
|
| 283 |
+
|
| 284 |
+
More details, please refer to [Multilingual OCR Development Plan](https://github.com/PaddlePaddle/PaddleOCR/issues/1048).
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
<a name="LICENSE"></a>
|
| 288 |
+
## 📄 License
|
| 289 |
+
This project is released under <a href="https://github.com/PaddlePaddle/PaddleOCR/blob/master/LICENSE">Apache 2.0 license</a>
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/__init__.py
ADDED
|
File without changes
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/arabic.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:cd25bfc3c6d745a8a4b4d415321aa5b43d99b61744b50d20e32931811ec7e268
|
| 3 |
+
size 102000
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/chinese_cht.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:5ce814960d0cdea1dd647180636babc1cf6a0acf0a9a9019424f4689acedd9ea
|
| 3 |
+
size 7376416
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/cyrillic.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:084768d29859a62b735387fb5946dfe61fb3d844031c7c51c1668d8afbe3b802
|
| 3 |
+
size 56198
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/french.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:525979822591a3447cfc49d943d6f7683508e25543407871c0ed8fed05fd2bd9
|
| 3 |
+
size 773236
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/german.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:525979822591a3447cfc49d943d6f7683508e25543407871c0ed8fed05fd2bd9
|
| 3 |
+
size 773236
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/hindi.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0d519981fc26e2fe934bd25ec9dfe478e082c99063d868008b20996809e13ccc
|
| 3 |
+
size 222356
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/japan.ttc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:11122490a5e3a862015c8894183750de59abf95c3936d63d5978293d92f23dba
|
| 3 |
+
size 3478068
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/kannada.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b337386a8e853ccba53c0c248bd06f025d7667b800ba74c72c66040d67315c6e
|
| 3 |
+
size 797016
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/korean.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0897316bdb2e308cea2841c54940f2ef5707856000aa07910c8bff39a47e40bd
|
| 3 |
+
size 1222780
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/latin.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:1562fe5cbdaacab4a5880d6404ba05245d12f3a4478fe16021e976bc725ce5d5
|
| 3 |
+
size 54948
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/marathi.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0de62f3fe6c7de77d8d1ffbde4980969bd1f20347019ce8136b492e93d7ea9e6
|
| 3 |
+
size 68684
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/nepali.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0d519981fc26e2fe934bd25ec9dfe478e082c99063d868008b20996809e13ccc
|
| 3 |
+
size 222356
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/persian.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:233ec81460a1c2ccd51d3187b1cf27b1dd5a5a8131a7931357aa95b81ab2a8c3
|
| 3 |
+
size 31564
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/simfang.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:521c6f7546b4eb64fa4b0cd604bbd36333a20a57e388c8e2ad2ad07b9e593864
|
| 3 |
+
size 10576012
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/spanish.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:3a3e632f80a2918e0536585ce52ecf2f379dc0f6b65b5b88d731ae52f9ac0d54
|
| 3 |
+
size 336452
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/tamil.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b771ac413157f6b1f1a52fb8ff1b56057f4b492fcce385ddd32ca12eee0c73b0
|
| 3 |
+
size 142512
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/telugu.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7f82ab141b77d263f9ea9b31b47faf50c11310f42fce6d9dffeaaa334909bbf9
|
| 3 |
+
size 990048
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/urdu.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b21f221262edff3cf4fd95b92123125d9368a38a382b1d6c9e502fe776b44254
|
| 3 |
+
size 38788
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/fonts/uyghur.ttf
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b21f221262edff3cf4fd95b92123125d9368a38a382b1d6c9e502fe776b44254
|
| 3 |
+
size 38788
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/freeze.txt
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
astor==0.8.1
|
| 2 |
+
attrdict==2.0.1
|
| 3 |
+
babel==2.16.0
|
| 4 |
+
bce-python-sdk==0.9.21
|
| 5 |
+
blinker==1.8.2
|
| 6 |
+
cachetools==5.5.0
|
| 7 |
+
certifi==2024.8.30
|
| 8 |
+
charset-normalizer==3.3.2
|
| 9 |
+
click==8.1.7
|
| 10 |
+
contourpy==1.3.0
|
| 11 |
+
cssselect==1.2.0
|
| 12 |
+
cssutils==2.11.1
|
| 13 |
+
cycler==0.12.1
|
| 14 |
+
Cython==3.0.11
|
| 15 |
+
decorator==5.1.1
|
| 16 |
+
et-xmlfile==1.1.0
|
| 17 |
+
Flask==3.0.3
|
| 18 |
+
flask-babel==4.0.0
|
| 19 |
+
fonttools==4.53.1
|
| 20 |
+
future==1.0.0
|
| 21 |
+
idna==3.8
|
| 22 |
+
imageio==2.35.1
|
| 23 |
+
imgaug==0.4.0
|
| 24 |
+
itsdangerous==2.2.0
|
| 25 |
+
Jinja2==3.1.4
|
| 26 |
+
kiwisolver==1.4.5
|
| 27 |
+
lazy_loader==0.4
|
| 28 |
+
lmdb==1.5.1
|
| 29 |
+
lxml==5.3.0
|
| 30 |
+
MarkupSafe==2.1.5
|
| 31 |
+
matplotlib==3.9.2
|
| 32 |
+
more-itertools==10.4.0
|
| 33 |
+
networkx==3.3
|
| 34 |
+
numpy==1.26.4
|
| 35 |
+
opencv-contrib-python==4.10.0.84
|
| 36 |
+
opencv-python==4.6.0.66
|
| 37 |
+
openpyxl==3.1.5
|
| 38 |
+
opt-einsum==3.3.0
|
| 39 |
+
packaging==24.1
|
| 40 |
+
pillow==10.4.0
|
| 41 |
+
premailer==3.10.0
|
| 42 |
+
psutil==6.0.0
|
| 43 |
+
pyclipper==1.3.0.post5
|
| 44 |
+
pycryptodome==3.20.0
|
| 45 |
+
pyparsing==3.1.4
|
| 46 |
+
python-dateutil==2.9.0.post0
|
| 47 |
+
pytz==2024.1
|
| 48 |
+
rapidfuzz==3.9.7
|
| 49 |
+
rarfile==4.2
|
| 50 |
+
requests==2.32.3
|
| 51 |
+
scikit-image==0.24.0
|
| 52 |
+
scipy==1.14.1
|
| 53 |
+
shapely==2.0.6
|
| 54 |
+
six==1.16.0
|
| 55 |
+
tifffile==2024.8.30
|
| 56 |
+
tqdm==4.66.5
|
| 57 |
+
tzdata==2024.1
|
| 58 |
+
urllib3==2.2.2
|
| 59 |
+
Werkzeug==3.0.4
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/__init__.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
# Copyright 2007 Google Inc. All Rights Reserved.
|
| 32 |
+
|
| 33 |
+
__version__ = '3.20.0'
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/any_pb2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/any.proto
|
| 4 |
+
"""Generated protocol buffer code."""
|
| 5 |
+
from google.protobuf.internal import builder as _builder
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
# @@protoc_insertion_point(imports)
|
| 10 |
+
|
| 11 |
+
_sym_db = _symbol_database.Default()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
| 17 |
+
|
| 18 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
| 19 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals())
|
| 20 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 21 |
+
|
| 22 |
+
DESCRIPTOR._options = None
|
| 23 |
+
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
| 24 |
+
_ANY._serialized_start=46
|
| 25 |
+
_ANY._serialized_end=84
|
| 26 |
+
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/api_pb2.py
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/api.proto
|
| 4 |
+
"""Generated protocol buffer code."""
|
| 5 |
+
from google.protobuf.internal import builder as _builder
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
# @@protoc_insertion_point(imports)
|
| 10 |
+
|
| 11 |
+
_sym_db = _symbol_database.Default()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
|
| 15 |
+
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
| 19 |
+
|
| 20 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
| 21 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals())
|
| 22 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 23 |
+
|
| 24 |
+
DESCRIPTOR._options = None
|
| 25 |
+
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
| 26 |
+
_API._serialized_start=113
|
| 27 |
+
_API._serialized_end=370
|
| 28 |
+
_METHOD._serialized_start=373
|
| 29 |
+
_METHOD._serialized_end=586
|
| 30 |
+
_MIXIN._serialized_start=588
|
| 31 |
+
_MIXIN._serialized_end=623
|
| 32 |
+
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/compiler/__init__.py
ADDED
|
File without changes
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/compiler/plugin_pb2.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/compiler/plugin.proto
|
| 4 |
+
"""Generated protocol buffer code."""
|
| 5 |
+
from google.protobuf.internal import builder as _builder
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
# @@protoc_insertion_point(imports)
|
| 10 |
+
|
| 11 |
+
_sym_db = _symbol_database.Default()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb')
|
| 18 |
+
|
| 19 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
| 20 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals())
|
| 21 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 22 |
+
|
| 23 |
+
DESCRIPTOR._options = None
|
| 24 |
+
DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb'
|
| 25 |
+
_VERSION._serialized_start=101
|
| 26 |
+
_VERSION._serialized_end=171
|
| 27 |
+
_CODEGENERATORREQUEST._serialized_start=174
|
| 28 |
+
_CODEGENERATORREQUEST._serialized_end=360
|
| 29 |
+
_CODEGENERATORRESPONSE._serialized_start=363
|
| 30 |
+
_CODEGENERATORRESPONSE._serialized_end=684
|
| 31 |
+
_CODEGENERATORRESPONSE_FILE._serialized_start=499
|
| 32 |
+
_CODEGENERATORRESPONSE_FILE._serialized_end=626
|
| 33 |
+
_CODEGENERATORRESPONSE_FEATURE._serialized_start=628
|
| 34 |
+
_CODEGENERATORRESPONSE_FEATURE._serialized_end=684
|
| 35 |
+
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor.py
ADDED
|
@@ -0,0 +1,1224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Descriptors essentially contain exactly the information found in a .proto
|
| 32 |
+
file, in types that make this information accessible in Python.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 36 |
+
|
| 37 |
+
import threading
|
| 38 |
+
import warnings
|
| 39 |
+
|
| 40 |
+
from google.protobuf.internal import api_implementation
|
| 41 |
+
|
| 42 |
+
_USE_C_DESCRIPTORS = False
|
| 43 |
+
if api_implementation.Type() == 'cpp':
|
| 44 |
+
# Used by MakeDescriptor in cpp mode
|
| 45 |
+
import binascii
|
| 46 |
+
import os
|
| 47 |
+
from google.protobuf.pyext import _message
|
| 48 |
+
_USE_C_DESCRIPTORS = True
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class Error(Exception):
|
| 52 |
+
"""Base error for this module."""
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class TypeTransformationError(Error):
|
| 56 |
+
"""Error transforming between python proto type and corresponding C++ type."""
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
if _USE_C_DESCRIPTORS:
|
| 60 |
+
# This metaclass allows to override the behavior of code like
|
| 61 |
+
# isinstance(my_descriptor, FieldDescriptor)
|
| 62 |
+
# and make it return True when the descriptor is an instance of the extension
|
| 63 |
+
# type written in C++.
|
| 64 |
+
class DescriptorMetaclass(type):
|
| 65 |
+
def __instancecheck__(cls, obj):
|
| 66 |
+
if super(DescriptorMetaclass, cls).__instancecheck__(obj):
|
| 67 |
+
return True
|
| 68 |
+
if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
|
| 69 |
+
return True
|
| 70 |
+
return False
|
| 71 |
+
else:
|
| 72 |
+
# The standard metaclass; nothing changes.
|
| 73 |
+
DescriptorMetaclass = type
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class _Lock(object):
|
| 77 |
+
"""Wrapper class of threading.Lock(), which is allowed by 'with'."""
|
| 78 |
+
|
| 79 |
+
def __new__(cls):
|
| 80 |
+
self = object.__new__(cls)
|
| 81 |
+
self._lock = threading.Lock() # pylint: disable=protected-access
|
| 82 |
+
return self
|
| 83 |
+
|
| 84 |
+
def __enter__(self):
|
| 85 |
+
self._lock.acquire()
|
| 86 |
+
|
| 87 |
+
def __exit__(self, exc_type, exc_value, exc_tb):
|
| 88 |
+
self._lock.release()
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
_lock = threading.Lock()
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def _Deprecated(name):
|
| 95 |
+
if _Deprecated.count > 0:
|
| 96 |
+
_Deprecated.count -= 1
|
| 97 |
+
warnings.warn(
|
| 98 |
+
'Call to deprecated create function %s(). Note: Create unlinked '
|
| 99 |
+
'descriptors is going to go away. Please use get/find descriptors from '
|
| 100 |
+
'generated code or query the descriptor_pool.'
|
| 101 |
+
% name,
|
| 102 |
+
category=DeprecationWarning, stacklevel=3)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# Deprecated warnings will print 100 times at most which should be enough for
|
| 106 |
+
# users to notice and do not cause timeout.
|
| 107 |
+
_Deprecated.count = 100
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
_internal_create_key = object()
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
class DescriptorBase(metaclass=DescriptorMetaclass):

  """Descriptors base class.

  This class is the base of all descriptor classes. It provides common options
  related functionality.

  Attributes:
    has_options: True if the descriptor has non-default options. Usually it
      is not necessary to read this -- just call GetOptions() which will
      happily return the default instance. However, it's sometimes useful
      for efficiency, and also useful inside the protobuf implementation to
      avoid some bootstrapping issues.
  """

  if _USE_C_DESCRIPTORS:
    # The class, or tuple of classes, that are considered as "virtual
    # subclasses" of this descriptor class.
    _C_DESCRIPTOR_CLASS = ()

  def __init__(self, options, serialized_options, options_class_name):
    """Initialize the descriptor given its options message and the name of the
    class of the options message. The name of the class is required in case
    the options message is None and has to be created.
    """
    self._options = options
    self._options_class_name = options_class_name
    self._serialized_options = serialized_options

    # Does this descriptor have non-default options?
    self.has_options = (options is not None) or (serialized_options is not None)

  def _SetOptions(self, options, options_class_name):
    """Sets the descriptor's options.

    This function is used in generated proto2 files to update descriptor
    options. It must not be used outside proto2.
    """
    self._options = options
    self._options_class_name = options_class_name

    # Does this descriptor have non-default options?
    self.has_options = options is not None

  def GetOptions(self):
    """Retrieves descriptor options.

    This method returns the options set or creates the default options for the
    descriptor.
    """
    if self._options:
      return self._options

    # Imported here to avoid a bootstrapping cycle: descriptor_pb2 itself is
    # generated code that imports this module.
    from google.protobuf import descriptor_pb2
    try:
      options_class = getattr(descriptor_pb2,
                              self._options_class_name)
    except AttributeError:
      raise RuntimeError('Unknown options class name %s!' %
                         (self._options_class_name))

    with _lock:
      if self._serialized_options is None:
        self._options = options_class()
      else:
        self._options = _ParseOptions(options_class(),
                                      self._serialized_options)

      return self._options
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class _NestedDescriptorBase(DescriptorBase):
  """Common class for descriptors that can be nested."""

  def __init__(self, options, options_class_name, name, full_name,
               file, containing_type, serialized_start=None,
               serialized_end=None, serialized_options=None):
    """Constructor.

    Args:
      options: Protocol message options or None
        to use default message options.
      options_class_name (str): The class name of the above options.
      name (str): Name of this protocol message type.
      full_name (str): Fully-qualified name of this protocol message type,
        which will include protocol "package" name and the name of any
        enclosing types.
      file (FileDescriptor): Reference to file info.
      containing_type: if provided, this is a nested descriptor, with this
        descriptor as parent, otherwise None.
      serialized_start: The start index (inclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_end: The end index (exclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_options: Protocol message serialized options or None.
    """
    super(_NestedDescriptorBase, self).__init__(
        options, serialized_options, options_class_name)

    self.name = name
    # TODO(falk): Add function to calculate full_name instead of having it in
    # memory?
    self.full_name = full_name
    self.file = file
    self.containing_type = containing_type

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def CopyToProto(self, proto):
    """Copies this to the matching proto in descriptor_pb2.

    Args:
      proto: An empty proto instance from descriptor_pb2.

    Raises:
      Error: If self couldn't be serialized, due to too few constructor
        arguments.
    """
    if (self.file is not None and
        self._serialized_start is not None and
        self._serialized_end is not None):
      # Re-parse this descriptor's slice of the file's serialized bytes.
      proto.ParseFromString(self.file.serialized_pb[
          self._serialized_start:self._serialized_end])
    else:
      raise Error('Descriptor does not contain serialization.')
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
class Descriptor(_NestedDescriptorBase):

  """Descriptor for a protocol message type.

  Attributes:
    name (str): Name of this protocol message type.
    full_name (str): Fully-qualified name of this protocol message type,
      which will include protocol "package" name and the name of any
      enclosing types.
    containing_type (Descriptor): Reference to the descriptor of the type
      containing us, or None if this is top-level.
    fields (list[FieldDescriptor]): Field descriptors for all fields in
      this type.
    fields_by_number (dict(int, FieldDescriptor)): Same
      :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed
      by "number" attribute in each FieldDescriptor.
    fields_by_name (dict(str, FieldDescriptor)): Same
      :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
      "name" attribute in each :class:`FieldDescriptor`.
    nested_types (list[Descriptor]): Descriptor references
      for all protocol message types nested within this one.
    nested_types_by_name (dict(str, Descriptor)): Same Descriptor
      objects as in :attr:`nested_types`, but indexed by "name" attribute
      in each Descriptor.
    enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references
      for all enums contained within this type.
    enum_types_by_name (dict(str, EnumDescriptor)): Same
      :class:`EnumDescriptor` objects as in :attr:`enum_types`, but
      indexed by "name" attribute in each EnumDescriptor.
    enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping
      from enum value name to :class:`EnumValueDescriptor` for that value.
    extensions (list[FieldDescriptor]): All extensions defined directly
      within this message type (NOT within a nested type).
    extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
      objects as :attr:`extensions`, but indexed by "name" attribute of each
      FieldDescriptor.
    is_extendable (bool): Does this type define any extension ranges?
    oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
      in this message.
    oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
      :attr:`oneofs`, but indexed by "name" attribute.
    file (FileDescriptor): Reference to file descriptor.

  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.Descriptor

    # With C descriptors enabled, construction from generated code is
    # redirected to the C default pool; the Python __init__ below never runs.
    def __new__(
        cls,
        name=None,
        full_name=None,
        filename=None,
        containing_type=None,
        fields=None,
        nested_types=None,
        enum_types=None,
        extensions=None,
        options=None,
        serialized_options=None,
        is_extendable=True,
        extension_ranges=None,
        oneofs=None,
        file=None,  # pylint: disable=redefined-builtin
        serialized_start=None,
        serialized_end=None,
        syntax=None,
        create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindMessageTypeByName(full_name)

  # NOTE(tmarek): The file argument redefining a builtin is nothing we can
  # fix right now since we don't know how many clients already rely on the
  # name of the argument.
  def __init__(self, name, full_name, filename, containing_type, fields,
               nested_types, enum_types, extensions, options=None,
               serialized_options=None,
               is_extendable=True, extension_ranges=None, oneofs=None,
               file=None, serialized_start=None, serialized_end=None,  # pylint: disable=redefined-builtin
               syntax=None, create_key=None):
    """Arguments to __init__() are as described in the description
    of Descriptor fields above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    if create_key is not _internal_create_key:
      _Deprecated('Descriptor')

    super(Descriptor, self).__init__(
        options, 'MessageOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end, serialized_options=serialized_options)

    # We have fields in addition to fields_by_name and fields_by_number,
    # so that:
    #   1. Clients can index fields by "order in which they're listed."
    #   2. Clients can easily iterate over all fields with the terse
    #      syntax: for f in descriptor.fields: ...
    self.fields = fields
    for field in self.fields:
      field.containing_type = self
    self.fields_by_number = dict((f.number, f) for f in fields)
    self.fields_by_name = dict((f.name, f) for f in fields)
    self._fields_by_camelcase_name = None  # built lazily by the property below

    self.nested_types = nested_types
    for nested_type in nested_types:
      nested_type.containing_type = self
    self.nested_types_by_name = dict((t.name, t) for t in nested_types)

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      enum_type.containing_type = self
    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
    self.enum_values_by_name = dict(
        (v.name, v) for t in enum_types for v in t.values)

    self.extensions = extensions
    for extension in self.extensions:
      extension.extension_scope = self
    self.extensions_by_name = dict((f.name, f) for f in extensions)
    self.is_extendable = is_extendable
    self.extension_ranges = extension_ranges
    self.oneofs = oneofs if oneofs is not None else []
    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
    for oneof in self.oneofs:
      oneof.containing_type = self
    self.syntax = syntax or "proto2"

  @property
  def fields_by_camelcase_name(self):
    """Same FieldDescriptor objects as in :attr:`fields`, but indexed by
    :attr:`FieldDescriptor.camelcase_name`.
    """
    if self._fields_by_camelcase_name is None:
      self._fields_by_camelcase_name = dict(
          (f.camelcase_name, f) for f in self.fields)
    return self._fields_by_camelcase_name

  def EnumValueName(self, enum, value):
    """Returns the string name of an enum value.

    This is just a small helper method to simplify a common operation.

    Args:
      enum: string name of the Enum.
      value: int, value of the enum.

    Returns:
      string name of the enum value.

    Raises:
      KeyError if either the Enum doesn't exist or the value is not a valid
        value for the enum.
    """
    return self.enum_types_by_name[enum].values_by_number[value].name

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.DescriptorProto.

    Args:
      proto: An empty descriptor_pb2.DescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(Descriptor, self).CopyToProto(proto)
|
| 407 |
+
|
| 408 |
+
|
| 409 |
+
# TODO(robinson): We should have aggressive checking here,
# for example:
#   * If you specify a repeated field, you should not be allowed
#     to specify a default value.
#   * [Other examples here as needed].
#
# TODO(robinson): for this and other *Descriptor classes, we
# might also want to lock things down aggressively (e.g.,
# prevent clients from setting the attributes).  Having
# stronger invariants here in general will reduce the number
# of runtime checks we must do in reflection.py...
class FieldDescriptor(DescriptorBase):

  """Descriptor for a single field in a .proto file.

  Attributes:
    name (str): Name of this field, exactly as it appears in .proto.
    full_name (str): Name of this field, including containing scope.  This is
      particularly relevant for extensions.
    index (int): Dense, 0-indexed index giving the order that this
      field textually appears within its message in the .proto file.
    number (int): Tag number declared for this field in the .proto file.

    type (int): (One of the TYPE_* constants below) Declared type.
    cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
      represent this field.

    label (int): (One of the LABEL_* constants below) Tells whether this
      field is optional, required, or repeated.
    has_default_value (bool): True if this field has a default value defined,
      otherwise false.
    default_value (Varies): Default value of this field.  Only
      meaningful for non-repeated scalar fields.  Repeated fields
      should always set this to [], and non-repeated composite
      fields should always set this to None.

    containing_type (Descriptor): Descriptor of the protocol message
      type that contains this field.  Set by the Descriptor constructor
      if we're passed into one.
      Somewhat confusingly, for extension fields, this is the
      descriptor of the EXTENDED message, not the descriptor
      of the message containing this field.  (See is_extension and
      extension_scope below).
    message_type (Descriptor): If a composite field, a descriptor
      of the message type contained in this field.  Otherwise, this is None.
    enum_type (EnumDescriptor): If this field contains an enum, a
      descriptor of that enum.  Otherwise, this is None.

    is_extension: True iff this describes an extension field.
    extension_scope (Descriptor): Only meaningful if is_extension is True.
      Gives the message that immediately contains this extension field.
      Will be None iff we're a top-level (file-level) extension field.

    options (descriptor_pb2.FieldOptions): Protocol message field options or
      None to use default field options.

    containing_oneof (OneofDescriptor): If the field is a member of a oneof
      union, contains its descriptor. Otherwise, None.

    file (FileDescriptor): Reference to file descriptor.
  """

  # Must be consistent with C++ FieldDescriptor::Type enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # Must be consistent with C++ FieldDescriptor::CppType enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  # Mapping from declared .proto wire type to the C++ storage type above.
  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_SINT64: CPPTYPE_INT64,
      TYPE_SFIXED64: CPPTYPE_INT64,
      TYPE_UINT64: CPPTYPE_UINT64,
      TYPE_FIXED64: CPPTYPE_UINT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_SFIXED32: CPPTYPE_INT32,
      TYPE_SINT32: CPPTYPE_INT32,
      TYPE_UINT32: CPPTYPE_UINT32,
      TYPE_FIXED32: CPPTYPE_UINT32,
      TYPE_BYTES: CPPTYPE_STRING,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE,
      TYPE_GROUP: CPPTYPE_MESSAGE
      }

  # Must be consistent with C++ FieldDescriptor::Label enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
  # and kLastReservedNumber in descriptor.h
  MAX_FIELD_NUMBER = (1 << 29) - 1
  FIRST_RESERVED_FIELD_NUMBER = 19000
  LAST_RESERVED_FIELD_NUMBER = 19999

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FieldDescriptor

    # With C descriptors enabled, resolve the already-built descriptor from
    # the C default pool instead of constructing a Python one.
    def __new__(cls, name, full_name, index, number, type, cpp_type, label,
                default_value, message_type, enum_type, containing_type,
                is_extension, extension_scope, options=None,
                serialized_options=None,
                has_default_value=True, containing_oneof=None, json_name=None,
                file=None, create_key=None):  # pylint: disable=redefined-builtin
      _message.Message._CheckCalledFromGeneratedFile()
      if is_extension:
        return _message.default_pool.FindExtensionByName(full_name)
      else:
        return _message.default_pool.FindFieldByName(full_name)

  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None,
               serialized_options=None,
               has_default_value=True, containing_oneof=None, json_name=None,
               file=None, create_key=None):  # pylint: disable=redefined-builtin
    """The arguments are as described in the description of FieldDescriptor
    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    if create_key is not _internal_create_key:
      _Deprecated('FieldDescriptor')

    super(FieldDescriptor, self).__init__(
        options, serialized_options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    self.file = file
    self._camelcase_name = None  # built lazily by the camelcase_name property
    if json_name is None:
      self.json_name = _ToJsonName(name)
    else:
      self.json_name = json_name
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    self.containing_oneof = containing_oneof
    if api_implementation.Type() == 'cpp':
      if is_extension:
        self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
      else:
        self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
    else:
      self._cdescriptor = None

  @property
  def camelcase_name(self):
    """Camelcase name of this field.

    Returns:
      str: the name in CamelCase.
    """
    if self._camelcase_name is None:
      self._camelcase_name = _ToCamelCase(self.name)
    return self._camelcase_name

  @property
  def has_presence(self):
    """Whether the field distinguishes between unpopulated and default values.

    Raises:
      RuntimeError: singular field that is not linked with message nor file.
    """
    if self.label == FieldDescriptor.LABEL_REPEATED:
      return False
    if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or
        self.containing_oneof):
      return True
    # Falls back to the syntax of the enclosing file (or message) to decide
    # presence for singular scalar fields.
    if hasattr(self.file, 'syntax'):
      return self.file.syntax == 'proto2'
    if hasattr(self.message_type, 'syntax'):
      return self.message_type.syntax == 'proto2'
    raise RuntimeError(
        'has_presence is not ready to use because field %s is not'
        ' linked with message type nor file' % self.full_name)

  @staticmethod
  def ProtoTypeToCppProtoType(proto_type):
    """Converts from a Python proto type to a C++ Proto Type.

    The Python ProtocolBuffer classes specify both the 'Python' datatype and the
    'C++' datatype - and they're not the same. This helper method should
    translate from one to another.

    Args:
      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
    Returns:
      int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
    Raises:
      TypeTransformationError: when the Python proto type isn't known.
    """
    try:
      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
    except KeyError:
      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
class EnumDescriptor(_NestedDescriptorBase):

  """Descriptor for an enum defined in a .proto file.

  Attributes:
    name (str): Name of the enum type.
    full_name (str): Full name of the type, including package name
      and any enclosing type(s).

    values (list[EnumValueDescriptor]): List of the values
      in this enum.
    values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
      but indexed by the "name" field of each EnumValueDescriptor.
    values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
      but indexed by the "number" field of each EnumValueDescriptor.
    containing_type (Descriptor): Descriptor of the immediate containing
      type of this enum, or None if this is an enum defined at the
      top level in a .proto file.  Set by Descriptor's constructor
      if we're passed into one.
    file (FileDescriptor): Reference to file descriptor.
    options (descriptor_pb2.EnumOptions): Enum options message or
      None to use default enum options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumDescriptor

    # With C descriptors enabled, resolve from the C default pool instead of
    # constructing a Python descriptor.
    def __new__(cls, name, full_name, filename, values,
                containing_type=None, options=None,
                serialized_options=None, file=None,  # pylint: disable=redefined-builtin
                serialized_start=None, serialized_end=None, create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindEnumTypeByName(full_name)

  def __init__(self, name, full_name, filename, values,
               containing_type=None, options=None,
               serialized_options=None, file=None,  # pylint: disable=redefined-builtin
               serialized_start=None, serialized_end=None, create_key=None):
    """Arguments are as described in the attribute description above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    if create_key is not _internal_create_key:
      _Deprecated('EnumDescriptor')

    super(EnumDescriptor, self).__init__(
        options, 'EnumOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end, serialized_options=serialized_options)

    self.values = values
    for value in self.values:
      value.type = self
    self.values_by_name = dict((v.name, v) for v in values)
    # Values are reversed to ensure that the first alias is retained.
    self.values_by_number = dict((v.number, v) for v in reversed(values))

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.EnumDescriptorProto.

    Args:
      proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
    """
    # This function is overridden to give a better doc comment.
    super(EnumDescriptor, self).CopyToProto(proto)
|
| 727 |
+
|
| 728 |
+
|
| 729 |
+
class EnumValueDescriptor(DescriptorBase):

  """Descriptor for a single value within an enum.

  Attributes:
    name (str): Name of this value.
    index (int): Dense, 0-indexed index giving the order that this
      value appears textually within its enum in the .proto file.
    number (int): Actual number assigned to this enum value.
    type (EnumDescriptor): :class:`EnumDescriptor` to which this value
      belongs.  Set by :class:`EnumDescriptor`'s constructor if we're
      passed into one.
    options (descriptor_pb2.EnumValueOptions): Enum value options message or
      None to use default enum value options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor

    def __new__(cls, name, index, number,
                type=None,  # pylint: disable=redefined-builtin
                options=None, serialized_options=None, create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()
      # There is no way we can build a complete EnumValueDescriptor with the
      # given parameters (the name of the Enum is not known, for example).
      # Fortunately generated files just pass it to the EnumDescriptor()
      # constructor, which will ignore it, so returning None is good enough.
      return None

  def __init__(self, name, index, number,
               type=None,  # pylint: disable=redefined-builtin
               options=None, serialized_options=None, create_key=None):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('EnumValueDescriptor')

    super(EnumValueDescriptor, self).__init__(
        options, serialized_options, 'EnumValueOptions')
    self.name = name
    self.index = index
    self.number = number
    self.type = type
|
| 771 |
+
|
| 772 |
+
|
| 773 |
+
class OneofDescriptor(DescriptorBase):
  """Descriptor for a oneof field.

  Attributes:
    name (str): Name of the oneof field.
    full_name (str): Full name of the oneof field, including package name.
    index (int): 0-based index giving the order of the oneof field inside
      its containing type.
    containing_type (Descriptor): :class:`Descriptor` of the protocol message
      type that contains this field.  Set by the :class:`Descriptor` constructor
      if we're passed into one.
    fields (list[FieldDescriptor]): The list of field descriptors this
      oneof can contain.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor

    # With C descriptors enabled, resolve from the C default pool instead of
    # constructing a Python descriptor.
    def __new__(
        cls, name, full_name, index, containing_type, fields, options=None,
        serialized_options=None, create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindOneofByName(full_name)

  def __init__(
      self, name, full_name, index, containing_type, fields, options=None,
      serialized_options=None, create_key=None):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('OneofDescriptor')

    super(OneofDescriptor, self).__init__(
        options, serialized_options, 'OneofOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_type = containing_type
    self.fields = fields
|
| 811 |
+
|
| 812 |
+
|
| 813 |
+
class ServiceDescriptor(_NestedDescriptorBase):

  """Descriptor for a service.

  Attributes:
    name (str): Name of the service.
    full_name (str): Full name of the service, including package name.
    index (int): 0-indexed index giving the order that this services
      definition appears within the .proto file.
    methods (list[MethodDescriptor]): List of methods provided by this
      service.
    methods_by_name (dict(str, MethodDescriptor)): Same
      :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but
      indexed by "name" attribute in each :class:`MethodDescriptor`.
    options (descriptor_pb2.ServiceOptions): Service options message or
      None to use default service options.
    file (FileDescriptor): Reference to file info.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor

    # With C descriptors enabled, resolve from the C default pool instead of
    # constructing a Python descriptor.
    def __new__(
        cls,
        name=None,
        full_name=None,
        index=None,
        methods=None,
        options=None,
        serialized_options=None,
        file=None,  # pylint: disable=redefined-builtin
        serialized_start=None,
        serialized_end=None,
        create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
      return _message.default_pool.FindServiceByName(full_name)

  def __init__(self, name, full_name, index, methods, options=None,
               serialized_options=None, file=None,  # pylint: disable=redefined-builtin
               serialized_start=None, serialized_end=None, create_key=None):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('ServiceDescriptor')

    super(ServiceDescriptor, self).__init__(
        options, 'ServiceOptions', name, full_name, file,
        None, serialized_start=serialized_start,
        serialized_end=serialized_end, serialized_options=serialized_options)
    self.index = index
    self.methods = methods
    self.methods_by_name = dict((m.name, m) for m in methods)
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor.

    Args:
      name (str): Name of the method.
    Returns:
      MethodDescriptor or None: the descriptor for the requested method, if
      found.
    """
    return self.methods_by_name.get(name, None)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
    """
    # This function is overridden to give a better doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)
|
| 886 |
+
|
| 887 |
+
|
| 888 |
+
class MethodDescriptor(DescriptorBase):
|
| 889 |
+
|
| 890 |
+
"""Descriptor for a method in a service.
|
| 891 |
+
|
| 892 |
+
Attributes:
|
| 893 |
+
name (str): Name of the method within the service.
|
| 894 |
+
full_name (str): Full name of method.
|
| 895 |
+
index (int): 0-indexed index of the method inside the service.
|
| 896 |
+
containing_service (ServiceDescriptor): The service that contains this
|
| 897 |
+
method.
|
| 898 |
+
input_type (Descriptor): The descriptor of the message that this method
|
| 899 |
+
accepts.
|
| 900 |
+
output_type (Descriptor): The descriptor of the message that this method
|
| 901 |
+
returns.
|
| 902 |
+
client_streaming (bool): Whether this method uses client streaming.
|
| 903 |
+
server_streaming (bool): Whether this method uses server streaming.
|
| 904 |
+
options (descriptor_pb2.MethodOptions or None): Method options message, or
|
| 905 |
+
None to use default method options.
|
| 906 |
+
"""
|
| 907 |
+
|
| 908 |
+
if _USE_C_DESCRIPTORS:
|
| 909 |
+
_C_DESCRIPTOR_CLASS = _message.MethodDescriptor
|
| 910 |
+
|
| 911 |
+
def __new__(cls,
|
| 912 |
+
name,
|
| 913 |
+
full_name,
|
| 914 |
+
index,
|
| 915 |
+
containing_service,
|
| 916 |
+
input_type,
|
| 917 |
+
output_type,
|
| 918 |
+
client_streaming=False,
|
| 919 |
+
server_streaming=False,
|
| 920 |
+
options=None,
|
| 921 |
+
serialized_options=None,
|
| 922 |
+
create_key=None):
|
| 923 |
+
_message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
|
| 924 |
+
return _message.default_pool.FindMethodByName(full_name)
|
| 925 |
+
|
| 926 |
+
def __init__(self,
|
| 927 |
+
name,
|
| 928 |
+
full_name,
|
| 929 |
+
index,
|
| 930 |
+
containing_service,
|
| 931 |
+
input_type,
|
| 932 |
+
output_type,
|
| 933 |
+
client_streaming=False,
|
| 934 |
+
server_streaming=False,
|
| 935 |
+
options=None,
|
| 936 |
+
serialized_options=None,
|
| 937 |
+
create_key=None):
|
| 938 |
+
"""The arguments are as described in the description of MethodDescriptor
|
| 939 |
+
attributes above.
|
| 940 |
+
|
| 941 |
+
Note that containing_service may be None, and may be set later if necessary.
|
| 942 |
+
"""
|
| 943 |
+
if create_key is not _internal_create_key:
|
| 944 |
+
_Deprecated('MethodDescriptor')
|
| 945 |
+
|
| 946 |
+
super(MethodDescriptor, self).__init__(
|
| 947 |
+
options, serialized_options, 'MethodOptions')
|
| 948 |
+
self.name = name
|
| 949 |
+
self.full_name = full_name
|
| 950 |
+
self.index = index
|
| 951 |
+
self.containing_service = containing_service
|
| 952 |
+
self.input_type = input_type
|
| 953 |
+
self.output_type = output_type
|
| 954 |
+
self.client_streaming = client_streaming
|
| 955 |
+
self.server_streaming = server_streaming
|
| 956 |
+
|
| 957 |
+
def CopyToProto(self, proto):
|
| 958 |
+
"""Copies this to a descriptor_pb2.MethodDescriptorProto.
|
| 959 |
+
|
| 960 |
+
Args:
|
| 961 |
+
proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.
|
| 962 |
+
|
| 963 |
+
Raises:
|
| 964 |
+
Error: If self couldn't be serialized, due to too few constructor
|
| 965 |
+
arguments.
|
| 966 |
+
"""
|
| 967 |
+
if self.containing_service is not None:
|
| 968 |
+
from google.protobuf import descriptor_pb2
|
| 969 |
+
service_proto = descriptor_pb2.ServiceDescriptorProto()
|
| 970 |
+
self.containing_service.CopyToProto(service_proto)
|
| 971 |
+
proto.CopyFrom(service_proto.method[self.index])
|
| 972 |
+
else:
|
| 973 |
+
raise Error('Descriptor does not contain a service.')
|
| 974 |
+
|
| 975 |
+
|
| 976 |
+
class FileDescriptor(DescriptorBase):
|
| 977 |
+
"""Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
|
| 978 |
+
|
| 979 |
+
Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
|
| 980 |
+
:attr:`dependencies` fields are only set by the
|
| 981 |
+
:py:mod:`google.protobuf.message_factory` module, and not by the generated
|
| 982 |
+
proto code.
|
| 983 |
+
|
| 984 |
+
Attributes:
|
| 985 |
+
name (str): Name of file, relative to root of source tree.
|
| 986 |
+
package (str): Name of the package
|
| 987 |
+
syntax (str): string indicating syntax of the file (can be "proto2" or
|
| 988 |
+
"proto3")
|
| 989 |
+
serialized_pb (bytes): Byte string of serialized
|
| 990 |
+
:class:`descriptor_pb2.FileDescriptorProto`.
|
| 991 |
+
dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
|
| 992 |
+
objects this :class:`FileDescriptor` depends on.
|
| 993 |
+
public_dependencies (list[FileDescriptor]): A subset of
|
| 994 |
+
:attr:`dependencies`, which were declared as "public".
|
| 995 |
+
message_types_by_name (dict(str, Descriptor)): Mapping from message names
|
| 996 |
+
to their :class:`Descriptor`.
|
| 997 |
+
enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
|
| 998 |
+
their :class:`EnumDescriptor`.
|
| 999 |
+
extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
|
| 1000 |
+
names declared at file scope to their :class:`FieldDescriptor`.
|
| 1001 |
+
services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
|
| 1002 |
+
names to their :class:`ServiceDescriptor`.
|
| 1003 |
+
pool (DescriptorPool): The pool this descriptor belongs to. When not
|
| 1004 |
+
passed to the constructor, the global default pool is used.
|
| 1005 |
+
"""
|
| 1006 |
+
|
| 1007 |
+
if _USE_C_DESCRIPTORS:
|
| 1008 |
+
_C_DESCRIPTOR_CLASS = _message.FileDescriptor
|
| 1009 |
+
|
| 1010 |
+
def __new__(cls, name, package, options=None,
|
| 1011 |
+
serialized_options=None, serialized_pb=None,
|
| 1012 |
+
dependencies=None, public_dependencies=None,
|
| 1013 |
+
syntax=None, pool=None, create_key=None):
|
| 1014 |
+
# FileDescriptor() is called from various places, not only from generated
|
| 1015 |
+
# files, to register dynamic proto files and messages.
|
| 1016 |
+
# pylint: disable=g-explicit-bool-comparison
|
| 1017 |
+
if serialized_pb == b'':
|
| 1018 |
+
# Cpp generated code must be linked in if serialized_pb is ''
|
| 1019 |
+
try:
|
| 1020 |
+
return _message.default_pool.FindFileByName(name)
|
| 1021 |
+
except KeyError:
|
| 1022 |
+
raise RuntimeError('Please link in cpp generated lib for %s' % (name))
|
| 1023 |
+
elif serialized_pb:
|
| 1024 |
+
return _message.default_pool.AddSerializedFile(serialized_pb)
|
| 1025 |
+
else:
|
| 1026 |
+
return super(FileDescriptor, cls).__new__(cls)
|
| 1027 |
+
|
| 1028 |
+
def __init__(self, name, package, options=None,
|
| 1029 |
+
serialized_options=None, serialized_pb=None,
|
| 1030 |
+
dependencies=None, public_dependencies=None,
|
| 1031 |
+
syntax=None, pool=None, create_key=None):
|
| 1032 |
+
"""Constructor."""
|
| 1033 |
+
if create_key is not _internal_create_key:
|
| 1034 |
+
_Deprecated('FileDescriptor')
|
| 1035 |
+
|
| 1036 |
+
super(FileDescriptor, self).__init__(
|
| 1037 |
+
options, serialized_options, 'FileOptions')
|
| 1038 |
+
|
| 1039 |
+
if pool is None:
|
| 1040 |
+
from google.protobuf import descriptor_pool
|
| 1041 |
+
pool = descriptor_pool.Default()
|
| 1042 |
+
self.pool = pool
|
| 1043 |
+
self.message_types_by_name = {}
|
| 1044 |
+
self.name = name
|
| 1045 |
+
self.package = package
|
| 1046 |
+
self.syntax = syntax or "proto2"
|
| 1047 |
+
self.serialized_pb = serialized_pb
|
| 1048 |
+
|
| 1049 |
+
self.enum_types_by_name = {}
|
| 1050 |
+
self.extensions_by_name = {}
|
| 1051 |
+
self.services_by_name = {}
|
| 1052 |
+
self.dependencies = (dependencies or [])
|
| 1053 |
+
self.public_dependencies = (public_dependencies or [])
|
| 1054 |
+
|
| 1055 |
+
def CopyToProto(self, proto):
|
| 1056 |
+
"""Copies this to a descriptor_pb2.FileDescriptorProto.
|
| 1057 |
+
|
| 1058 |
+
Args:
|
| 1059 |
+
proto: An empty descriptor_pb2.FileDescriptorProto.
|
| 1060 |
+
"""
|
| 1061 |
+
proto.ParseFromString(self.serialized_pb)
|
| 1062 |
+
|
| 1063 |
+
|
| 1064 |
+
def _ParseOptions(message, string):
|
| 1065 |
+
"""Parses serialized options.
|
| 1066 |
+
|
| 1067 |
+
This helper function is used to parse serialized options in generated
|
| 1068 |
+
proto2 files. It must not be used outside proto2.
|
| 1069 |
+
"""
|
| 1070 |
+
message.ParseFromString(string)
|
| 1071 |
+
return message
|
| 1072 |
+
|
| 1073 |
+
|
| 1074 |
+
def _ToCamelCase(name):
|
| 1075 |
+
"""Converts name to camel-case and returns it."""
|
| 1076 |
+
capitalize_next = False
|
| 1077 |
+
result = []
|
| 1078 |
+
|
| 1079 |
+
for c in name:
|
| 1080 |
+
if c == '_':
|
| 1081 |
+
if result:
|
| 1082 |
+
capitalize_next = True
|
| 1083 |
+
elif capitalize_next:
|
| 1084 |
+
result.append(c.upper())
|
| 1085 |
+
capitalize_next = False
|
| 1086 |
+
else:
|
| 1087 |
+
result += c
|
| 1088 |
+
|
| 1089 |
+
# Lower-case the first letter.
|
| 1090 |
+
if result and result[0].isupper():
|
| 1091 |
+
result[0] = result[0].lower()
|
| 1092 |
+
return ''.join(result)
|
| 1093 |
+
|
| 1094 |
+
|
| 1095 |
+
def _OptionsOrNone(descriptor_proto):
|
| 1096 |
+
"""Returns the value of the field `options`, or None if it is not set."""
|
| 1097 |
+
if descriptor_proto.HasField('options'):
|
| 1098 |
+
return descriptor_proto.options
|
| 1099 |
+
else:
|
| 1100 |
+
return None
|
| 1101 |
+
|
| 1102 |
+
|
| 1103 |
+
def _ToJsonName(name):
|
| 1104 |
+
"""Converts name to Json name and returns it."""
|
| 1105 |
+
capitalize_next = False
|
| 1106 |
+
result = []
|
| 1107 |
+
|
| 1108 |
+
for c in name:
|
| 1109 |
+
if c == '_':
|
| 1110 |
+
capitalize_next = True
|
| 1111 |
+
elif capitalize_next:
|
| 1112 |
+
result.append(c.upper())
|
| 1113 |
+
capitalize_next = False
|
| 1114 |
+
else:
|
| 1115 |
+
result += c
|
| 1116 |
+
|
| 1117 |
+
return ''.join(result)
|
| 1118 |
+
|
| 1119 |
+
|
| 1120 |
+
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
|
| 1121 |
+
syntax=None):
|
| 1122 |
+
"""Make a protobuf Descriptor given a DescriptorProto protobuf.
|
| 1123 |
+
|
| 1124 |
+
Handles nested descriptors. Note that this is limited to the scope of defining
|
| 1125 |
+
a message inside of another message. Composite fields can currently only be
|
| 1126 |
+
resolved if the message is defined in the same scope as the field.
|
| 1127 |
+
|
| 1128 |
+
Args:
|
| 1129 |
+
desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
|
| 1130 |
+
package: Optional package name for the new message Descriptor (string).
|
| 1131 |
+
build_file_if_cpp: Update the C++ descriptor pool if api matches.
|
| 1132 |
+
Set to False on recursion, so no duplicates are created.
|
| 1133 |
+
syntax: The syntax/semantics that should be used. Set to "proto3" to get
|
| 1134 |
+
proto3 field presence semantics.
|
| 1135 |
+
Returns:
|
| 1136 |
+
A Descriptor for protobuf messages.
|
| 1137 |
+
"""
|
| 1138 |
+
if api_implementation.Type() == 'cpp' and build_file_if_cpp:
|
| 1139 |
+
# The C++ implementation requires all descriptors to be backed by the same
|
| 1140 |
+
# definition in the C++ descriptor pool. To do this, we build a
|
| 1141 |
+
# FileDescriptorProto with the same definition as this descriptor and build
|
| 1142 |
+
# it into the pool.
|
| 1143 |
+
from google.protobuf import descriptor_pb2
|
| 1144 |
+
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
|
| 1145 |
+
file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
|
| 1146 |
+
|
| 1147 |
+
# Generate a random name for this proto file to prevent conflicts with any
|
| 1148 |
+
# imported ones. We need to specify a file name so the descriptor pool
|
| 1149 |
+
# accepts our FileDescriptorProto, but it is not important what that file
|
| 1150 |
+
# name is actually set to.
|
| 1151 |
+
proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')
|
| 1152 |
+
|
| 1153 |
+
if package:
|
| 1154 |
+
file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
|
| 1155 |
+
proto_name + '.proto')
|
| 1156 |
+
file_descriptor_proto.package = package
|
| 1157 |
+
else:
|
| 1158 |
+
file_descriptor_proto.name = proto_name + '.proto'
|
| 1159 |
+
|
| 1160 |
+
_message.default_pool.Add(file_descriptor_proto)
|
| 1161 |
+
result = _message.default_pool.FindFileByName(file_descriptor_proto.name)
|
| 1162 |
+
|
| 1163 |
+
if _USE_C_DESCRIPTORS:
|
| 1164 |
+
return result.message_types_by_name[desc_proto.name]
|
| 1165 |
+
|
| 1166 |
+
full_message_name = [desc_proto.name]
|
| 1167 |
+
if package: full_message_name.insert(0, package)
|
| 1168 |
+
|
| 1169 |
+
# Create Descriptors for enum types
|
| 1170 |
+
enum_types = {}
|
| 1171 |
+
for enum_proto in desc_proto.enum_type:
|
| 1172 |
+
full_name = '.'.join(full_message_name + [enum_proto.name])
|
| 1173 |
+
enum_desc = EnumDescriptor(
|
| 1174 |
+
enum_proto.name, full_name, None, [
|
| 1175 |
+
EnumValueDescriptor(enum_val.name, ii, enum_val.number,
|
| 1176 |
+
create_key=_internal_create_key)
|
| 1177 |
+
for ii, enum_val in enumerate(enum_proto.value)],
|
| 1178 |
+
create_key=_internal_create_key)
|
| 1179 |
+
enum_types[full_name] = enum_desc
|
| 1180 |
+
|
| 1181 |
+
# Create Descriptors for nested types
|
| 1182 |
+
nested_types = {}
|
| 1183 |
+
for nested_proto in desc_proto.nested_type:
|
| 1184 |
+
full_name = '.'.join(full_message_name + [nested_proto.name])
|
| 1185 |
+
# Nested types are just those defined inside of the message, not all types
|
| 1186 |
+
# used by fields in the message, so no loops are possible here.
|
| 1187 |
+
nested_desc = MakeDescriptor(nested_proto,
|
| 1188 |
+
package='.'.join(full_message_name),
|
| 1189 |
+
build_file_if_cpp=False,
|
| 1190 |
+
syntax=syntax)
|
| 1191 |
+
nested_types[full_name] = nested_desc
|
| 1192 |
+
|
| 1193 |
+
fields = []
|
| 1194 |
+
for field_proto in desc_proto.field:
|
| 1195 |
+
full_name = '.'.join(full_message_name + [field_proto.name])
|
| 1196 |
+
enum_desc = None
|
| 1197 |
+
nested_desc = None
|
| 1198 |
+
if field_proto.json_name:
|
| 1199 |
+
json_name = field_proto.json_name
|
| 1200 |
+
else:
|
| 1201 |
+
json_name = None
|
| 1202 |
+
if field_proto.HasField('type_name'):
|
| 1203 |
+
type_name = field_proto.type_name
|
| 1204 |
+
full_type_name = '.'.join(full_message_name +
|
| 1205 |
+
[type_name[type_name.rfind('.')+1:]])
|
| 1206 |
+
if full_type_name in nested_types:
|
| 1207 |
+
nested_desc = nested_types[full_type_name]
|
| 1208 |
+
elif full_type_name in enum_types:
|
| 1209 |
+
enum_desc = enum_types[full_type_name]
|
| 1210 |
+
# Else type_name references a non-local type, which isn't implemented
|
| 1211 |
+
field = FieldDescriptor(
|
| 1212 |
+
field_proto.name, full_name, field_proto.number - 1,
|
| 1213 |
+
field_proto.number, field_proto.type,
|
| 1214 |
+
FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
|
| 1215 |
+
field_proto.label, None, nested_desc, enum_desc, None, False, None,
|
| 1216 |
+
options=_OptionsOrNone(field_proto), has_default_value=False,
|
| 1217 |
+
json_name=json_name, create_key=_internal_create_key)
|
| 1218 |
+
fields.append(field)
|
| 1219 |
+
|
| 1220 |
+
desc_name = '.'.join(full_message_name)
|
| 1221 |
+
return Descriptor(desc_proto.name, desc_name, None, None, fields,
|
| 1222 |
+
list(nested_types.values()), list(enum_types.values()), [],
|
| 1223 |
+
options=_OptionsOrNone(desc_proto),
|
| 1224 |
+
create_key=_internal_create_key)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_database.py
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Provides a container for DescriptorProtos."""
|
| 32 |
+
|
| 33 |
+
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
| 34 |
+
|
| 35 |
+
import warnings
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Error(Exception):
|
| 39 |
+
pass
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class DescriptorDatabaseConflictingDefinitionError(Error):
|
| 43 |
+
"""Raised when a proto is added with the same name & different descriptor."""
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class DescriptorDatabase(object):
|
| 47 |
+
"""A container accepting FileDescriptorProtos and maps DescriptorProtos."""
|
| 48 |
+
|
| 49 |
+
def __init__(self):
|
| 50 |
+
self._file_desc_protos_by_file = {}
|
| 51 |
+
self._file_desc_protos_by_symbol = {}
|
| 52 |
+
|
| 53 |
+
def Add(self, file_desc_proto):
|
| 54 |
+
"""Adds the FileDescriptorProto and its types to this database.
|
| 55 |
+
|
| 56 |
+
Args:
|
| 57 |
+
file_desc_proto: The FileDescriptorProto to add.
|
| 58 |
+
Raises:
|
| 59 |
+
DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
|
| 60 |
+
add a proto with the same name but different definition than an
|
| 61 |
+
existing proto in the database.
|
| 62 |
+
"""
|
| 63 |
+
proto_name = file_desc_proto.name
|
| 64 |
+
if proto_name not in self._file_desc_protos_by_file:
|
| 65 |
+
self._file_desc_protos_by_file[proto_name] = file_desc_proto
|
| 66 |
+
elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
|
| 67 |
+
raise DescriptorDatabaseConflictingDefinitionError(
|
| 68 |
+
'%s already added, but with different descriptor.' % proto_name)
|
| 69 |
+
else:
|
| 70 |
+
return
|
| 71 |
+
|
| 72 |
+
# Add all the top-level descriptors to the index.
|
| 73 |
+
package = file_desc_proto.package
|
| 74 |
+
for message in file_desc_proto.message_type:
|
| 75 |
+
for name in _ExtractSymbols(message, package):
|
| 76 |
+
self._AddSymbol(name, file_desc_proto)
|
| 77 |
+
for enum in file_desc_proto.enum_type:
|
| 78 |
+
self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
|
| 79 |
+
for enum_value in enum.value:
|
| 80 |
+
self._file_desc_protos_by_symbol[
|
| 81 |
+
'.'.join((package, enum_value.name))] = file_desc_proto
|
| 82 |
+
for extension in file_desc_proto.extension:
|
| 83 |
+
self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
|
| 84 |
+
for service in file_desc_proto.service:
|
| 85 |
+
self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
|
| 86 |
+
|
| 87 |
+
def FindFileByName(self, name):
|
| 88 |
+
"""Finds the file descriptor proto by file name.
|
| 89 |
+
|
| 90 |
+
Typically the file name is a relative path ending to a .proto file. The
|
| 91 |
+
proto with the given name will have to have been added to this database
|
| 92 |
+
using the Add method or else an error will be raised.
|
| 93 |
+
|
| 94 |
+
Args:
|
| 95 |
+
name: The file name to find.
|
| 96 |
+
|
| 97 |
+
Returns:
|
| 98 |
+
The file descriptor proto matching the name.
|
| 99 |
+
|
| 100 |
+
Raises:
|
| 101 |
+
KeyError if no file by the given name was added.
|
| 102 |
+
"""
|
| 103 |
+
|
| 104 |
+
return self._file_desc_protos_by_file[name]
|
| 105 |
+
|
| 106 |
+
def FindFileContainingSymbol(self, symbol):
|
| 107 |
+
"""Finds the file descriptor proto containing the specified symbol.
|
| 108 |
+
|
| 109 |
+
The symbol should be a fully qualified name including the file descriptor's
|
| 110 |
+
package and any containing messages. Some examples:
|
| 111 |
+
|
| 112 |
+
'some.package.name.Message'
|
| 113 |
+
'some.package.name.Message.NestedEnum'
|
| 114 |
+
'some.package.name.Message.some_field'
|
| 115 |
+
|
| 116 |
+
The file descriptor proto containing the specified symbol must be added to
|
| 117 |
+
this database using the Add method or else an error will be raised.
|
| 118 |
+
|
| 119 |
+
Args:
|
| 120 |
+
symbol: The fully qualified symbol name.
|
| 121 |
+
|
| 122 |
+
Returns:
|
| 123 |
+
The file descriptor proto containing the symbol.
|
| 124 |
+
|
| 125 |
+
Raises:
|
| 126 |
+
KeyError if no file contains the specified symbol.
|
| 127 |
+
"""
|
| 128 |
+
try:
|
| 129 |
+
return self._file_desc_protos_by_symbol[symbol]
|
| 130 |
+
except KeyError:
|
| 131 |
+
# Fields, enum values, and nested extensions are not in
|
| 132 |
+
# _file_desc_protos_by_symbol. Try to find the top level
|
| 133 |
+
# descriptor. Non-existent nested symbol under a valid top level
|
| 134 |
+
# descriptor can also be found. The behavior is the same with
|
| 135 |
+
# protobuf C++.
|
| 136 |
+
top_level, _, _ = symbol.rpartition('.')
|
| 137 |
+
try:
|
| 138 |
+
return self._file_desc_protos_by_symbol[top_level]
|
| 139 |
+
except KeyError:
|
| 140 |
+
# Raise the original symbol as a KeyError for better diagnostics.
|
| 141 |
+
raise KeyError(symbol)
|
| 142 |
+
|
| 143 |
+
def FindFileContainingExtension(self, extendee_name, extension_number):
|
| 144 |
+
# TODO(jieluo): implement this API.
|
| 145 |
+
return None
|
| 146 |
+
|
| 147 |
+
def FindAllExtensionNumbers(self, extendee_name):
|
| 148 |
+
# TODO(jieluo): implement this API.
|
| 149 |
+
return []
|
| 150 |
+
|
| 151 |
+
def _AddSymbol(self, name, file_desc_proto):
|
| 152 |
+
if name in self._file_desc_protos_by_symbol:
|
| 153 |
+
warn_msg = ('Conflict register for file "' + file_desc_proto.name +
|
| 154 |
+
'": ' + name +
|
| 155 |
+
' is already defined in file "' +
|
| 156 |
+
self._file_desc_protos_by_symbol[name].name + '"')
|
| 157 |
+
warnings.warn(warn_msg, RuntimeWarning)
|
| 158 |
+
self._file_desc_protos_by_symbol[name] = file_desc_proto
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
def _ExtractSymbols(desc_proto, package):
|
| 162 |
+
"""Pulls out all the symbols from a descriptor proto.
|
| 163 |
+
|
| 164 |
+
Args:
|
| 165 |
+
desc_proto: The proto to extract symbols from.
|
| 166 |
+
package: The package containing the descriptor type.
|
| 167 |
+
|
| 168 |
+
Yields:
|
| 169 |
+
The fully qualified name found in the descriptor.
|
| 170 |
+
"""
|
| 171 |
+
message_name = package + '.' + desc_proto.name if package else desc_proto.name
|
| 172 |
+
yield message_name
|
| 173 |
+
for nested_type in desc_proto.nested_type:
|
| 174 |
+
for symbol in _ExtractSymbols(nested_type, message_name):
|
| 175 |
+
yield symbol
|
| 176 |
+
for enum_type in desc_proto.enum_type:
|
| 177 |
+
yield '.'.join((message_name, enum_type.name))
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_pb2.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/descriptor_pool.py
ADDED
|
@@ -0,0 +1,1295 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Provides DescriptorPool to use as a container for proto2 descriptors.
|
| 32 |
+
|
| 33 |
+
The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
|
| 34 |
+
a collection of protocol buffer descriptors for use when dynamically creating
|
| 35 |
+
message types at runtime.
|
| 36 |
+
|
| 37 |
+
For most applications protocol buffers should be used via modules generated by
|
| 38 |
+
the protocol buffer compiler tool. This should only be used when the type of
|
| 39 |
+
protocol buffers used in an application or library cannot be predetermined.
|
| 40 |
+
|
| 41 |
+
Below is a straightforward example on how to use this class::
|
| 42 |
+
|
| 43 |
+
pool = DescriptorPool()
|
| 44 |
+
file_descriptor_protos = [ ... ]
|
| 45 |
+
for file_descriptor_proto in file_descriptor_protos:
|
| 46 |
+
pool.Add(file_descriptor_proto)
|
| 47 |
+
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
|
| 48 |
+
|
| 49 |
+
The message descriptor can be used in conjunction with the message_factory
|
| 50 |
+
module in order to create a protocol buffer class that can be encoded and
|
| 51 |
+
decoded.
|
| 52 |
+
|
| 53 |
+
If you want to get a Python class for the specified proto, use the
|
| 54 |
+
helper functions inside google.protobuf.message_factory
|
| 55 |
+
directly instead of this class.
|
| 56 |
+
"""
|
| 57 |
+
|
| 58 |
+
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
| 59 |
+
|
| 60 |
+
import collections
|
| 61 |
+
import warnings
|
| 62 |
+
|
| 63 |
+
from google.protobuf import descriptor
|
| 64 |
+
from google.protobuf import descriptor_database
|
| 65 |
+
from google.protobuf import text_encoding
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def _Deprecated(func):
|
| 72 |
+
"""Mark functions as deprecated."""
|
| 73 |
+
|
| 74 |
+
def NewFunc(*args, **kwargs):
|
| 75 |
+
warnings.warn(
|
| 76 |
+
'Call to deprecated function %s(). Note: Do add unlinked descriptors '
|
| 77 |
+
'to descriptor_pool is wrong. Use Add() or AddSerializedFile() '
|
| 78 |
+
'instead.' % func.__name__,
|
| 79 |
+
category=DeprecationWarning)
|
| 80 |
+
return func(*args, **kwargs)
|
| 81 |
+
NewFunc.__name__ = func.__name__
|
| 82 |
+
NewFunc.__doc__ = func.__doc__
|
| 83 |
+
NewFunc.__dict__.update(func.__dict__)
|
| 84 |
+
return NewFunc
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def _NormalizeFullyQualifiedName(name):
|
| 88 |
+
"""Remove leading period from fully-qualified type name.
|
| 89 |
+
|
| 90 |
+
Due to b/13860351 in descriptor_database.py, types in the root namespace are
|
| 91 |
+
generated with a leading period. This function removes that prefix.
|
| 92 |
+
|
| 93 |
+
Args:
|
| 94 |
+
name (str): The fully-qualified symbol name.
|
| 95 |
+
|
| 96 |
+
Returns:
|
| 97 |
+
str: The normalized fully-qualified symbol name.
|
| 98 |
+
"""
|
| 99 |
+
return name.lstrip('.')
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def _OptionsOrNone(descriptor_proto):
|
| 103 |
+
"""Returns the value of the field `options`, or None if it is not set."""
|
| 104 |
+
if descriptor_proto.HasField('options'):
|
| 105 |
+
return descriptor_proto.options
|
| 106 |
+
else:
|
| 107 |
+
return None
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _IsMessageSetExtension(field):
|
| 111 |
+
return (field.is_extension and
|
| 112 |
+
field.containing_type.has_options and
|
| 113 |
+
field.containing_type.GetOptions().message_set_wire_format and
|
| 114 |
+
field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
|
| 115 |
+
field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class DescriptorPool(object):
|
| 119 |
+
"""A collection of protobufs dynamically constructed by descriptor protos."""
|
| 120 |
+
|
| 121 |
+
if _USE_C_DESCRIPTORS:
|
| 122 |
+
|
| 123 |
+
def __new__(cls, descriptor_db=None):
|
| 124 |
+
# pylint: disable=protected-access
|
| 125 |
+
return descriptor._message.DescriptorPool(descriptor_db)
|
| 126 |
+
|
| 127 |
+
def __init__(self, descriptor_db=None):
|
| 128 |
+
"""Initializes a Pool of proto buffs.
|
| 129 |
+
|
| 130 |
+
The descriptor_db argument to the constructor is provided to allow
|
| 131 |
+
specialized file descriptor proto lookup code to be triggered on demand. An
|
| 132 |
+
example would be an implementation which will read and compile a file
|
| 133 |
+
specified in a call to FindFileByName() and not require the call to Add()
|
| 134 |
+
at all. Results from this database will be cached internally here as well.
|
| 135 |
+
|
| 136 |
+
Args:
|
| 137 |
+
descriptor_db: A secondary source of file descriptors.
|
| 138 |
+
"""
|
| 139 |
+
|
| 140 |
+
self._internal_db = descriptor_database.DescriptorDatabase()
|
| 141 |
+
self._descriptor_db = descriptor_db
|
| 142 |
+
self._descriptors = {}
|
| 143 |
+
self._enum_descriptors = {}
|
| 144 |
+
self._service_descriptors = {}
|
| 145 |
+
self._file_descriptors = {}
|
| 146 |
+
self._toplevel_extensions = {}
|
| 147 |
+
# TODO(jieluo): Remove _file_desc_by_toplevel_extension after
|
| 148 |
+
# maybe year 2020 for compatibility issue (with 3.4.1 only).
|
| 149 |
+
self._file_desc_by_toplevel_extension = {}
|
| 150 |
+
self._top_enum_values = {}
|
| 151 |
+
# We store extensions in two two-level mappings: The first key is the
|
| 152 |
+
# descriptor of the message being extended, the second key is the extension
|
| 153 |
+
# full name or its tag number.
|
| 154 |
+
self._extensions_by_name = collections.defaultdict(dict)
|
| 155 |
+
self._extensions_by_number = collections.defaultdict(dict)
|
| 156 |
+
|
| 157 |
+
def _CheckConflictRegister(self, desc, desc_name, file_name):
|
| 158 |
+
"""Check if the descriptor name conflicts with another of the same name.
|
| 159 |
+
|
| 160 |
+
Args:
|
| 161 |
+
desc: Descriptor of a message, enum, service, extension or enum value.
|
| 162 |
+
desc_name (str): the full name of desc.
|
| 163 |
+
file_name (str): The file name of descriptor.
|
| 164 |
+
"""
|
| 165 |
+
for register, descriptor_type in [
|
| 166 |
+
(self._descriptors, descriptor.Descriptor),
|
| 167 |
+
(self._enum_descriptors, descriptor.EnumDescriptor),
|
| 168 |
+
(self._service_descriptors, descriptor.ServiceDescriptor),
|
| 169 |
+
(self._toplevel_extensions, descriptor.FieldDescriptor),
|
| 170 |
+
(self._top_enum_values, descriptor.EnumValueDescriptor)]:
|
| 171 |
+
if desc_name in register:
|
| 172 |
+
old_desc = register[desc_name]
|
| 173 |
+
if isinstance(old_desc, descriptor.EnumValueDescriptor):
|
| 174 |
+
old_file = old_desc.type.file.name
|
| 175 |
+
else:
|
| 176 |
+
old_file = old_desc.file.name
|
| 177 |
+
|
| 178 |
+
if not isinstance(desc, descriptor_type) or (
|
| 179 |
+
old_file != file_name):
|
| 180 |
+
error_msg = ('Conflict register for file "' + file_name +
|
| 181 |
+
'": ' + desc_name +
|
| 182 |
+
' is already defined in file "' +
|
| 183 |
+
old_file + '". Please fix the conflict by adding '
|
| 184 |
+
'package name on the proto file, or use different '
|
| 185 |
+
'name for the duplication.')
|
| 186 |
+
if isinstance(desc, descriptor.EnumValueDescriptor):
|
| 187 |
+
error_msg += ('\nNote: enum values appear as '
|
| 188 |
+
'siblings of the enum type instead of '
|
| 189 |
+
'children of it.')
|
| 190 |
+
|
| 191 |
+
raise TypeError(error_msg)
|
| 192 |
+
|
| 193 |
+
return
|
| 194 |
+
|
| 195 |
+
def Add(self, file_desc_proto):
|
| 196 |
+
"""Adds the FileDescriptorProto and its types to this pool.
|
| 197 |
+
|
| 198 |
+
Args:
|
| 199 |
+
file_desc_proto (FileDescriptorProto): The file descriptor to add.
|
| 200 |
+
"""
|
| 201 |
+
|
| 202 |
+
self._internal_db.Add(file_desc_proto)
|
| 203 |
+
|
| 204 |
+
def AddSerializedFile(self, serialized_file_desc_proto):
|
| 205 |
+
"""Adds the FileDescriptorProto and its types to this pool.
|
| 206 |
+
|
| 207 |
+
Args:
|
| 208 |
+
serialized_file_desc_proto (bytes): A bytes string, serialization of the
|
| 209 |
+
:class:`FileDescriptorProto` to add.
|
| 210 |
+
|
| 211 |
+
Returns:
|
| 212 |
+
FileDescriptor: Descriptor for the added file.
|
| 213 |
+
"""
|
| 214 |
+
|
| 215 |
+
# pylint: disable=g-import-not-at-top
|
| 216 |
+
from google.protobuf import descriptor_pb2
|
| 217 |
+
file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
|
| 218 |
+
serialized_file_desc_proto)
|
| 219 |
+
file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
|
| 220 |
+
file_desc.serialized_pb = serialized_file_desc_proto
|
| 221 |
+
return file_desc
|
| 222 |
+
|
| 223 |
+
# Add Descriptor to descriptor pool is dreprecated. Please use Add()
|
| 224 |
+
# or AddSerializedFile() to add a FileDescriptorProto instead.
|
| 225 |
+
@_Deprecated
|
| 226 |
+
def AddDescriptor(self, desc):
|
| 227 |
+
self._AddDescriptor(desc)
|
| 228 |
+
|
| 229 |
+
# Never call this method. It is for internal usage only.
|
| 230 |
+
def _AddDescriptor(self, desc):
|
| 231 |
+
"""Adds a Descriptor to the pool, non-recursively.
|
| 232 |
+
|
| 233 |
+
If the Descriptor contains nested messages or enums, the caller must
|
| 234 |
+
explicitly register them. This method also registers the FileDescriptor
|
| 235 |
+
associated with the message.
|
| 236 |
+
|
| 237 |
+
Args:
|
| 238 |
+
desc: A Descriptor.
|
| 239 |
+
"""
|
| 240 |
+
if not isinstance(desc, descriptor.Descriptor):
|
| 241 |
+
raise TypeError('Expected instance of descriptor.Descriptor.')
|
| 242 |
+
|
| 243 |
+
self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
|
| 244 |
+
|
| 245 |
+
self._descriptors[desc.full_name] = desc
|
| 246 |
+
self._AddFileDescriptor(desc.file)
|
| 247 |
+
|
| 248 |
+
# Add EnumDescriptor to descriptor pool is dreprecated. Please use Add()
|
| 249 |
+
# or AddSerializedFile() to add a FileDescriptorProto instead.
|
| 250 |
+
@_Deprecated
|
| 251 |
+
def AddEnumDescriptor(self, enum_desc):
|
| 252 |
+
self._AddEnumDescriptor(enum_desc)
|
| 253 |
+
|
| 254 |
+
# Never call this method. It is for internal usage only.
|
| 255 |
+
def _AddEnumDescriptor(self, enum_desc):
|
| 256 |
+
"""Adds an EnumDescriptor to the pool.
|
| 257 |
+
|
| 258 |
+
This method also registers the FileDescriptor associated with the enum.
|
| 259 |
+
|
| 260 |
+
Args:
|
| 261 |
+
enum_desc: An EnumDescriptor.
|
| 262 |
+
"""
|
| 263 |
+
|
| 264 |
+
if not isinstance(enum_desc, descriptor.EnumDescriptor):
|
| 265 |
+
raise TypeError('Expected instance of descriptor.EnumDescriptor.')
|
| 266 |
+
|
| 267 |
+
file_name = enum_desc.file.name
|
| 268 |
+
self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
|
| 269 |
+
self._enum_descriptors[enum_desc.full_name] = enum_desc
|
| 270 |
+
|
| 271 |
+
# Top enum values need to be indexed.
|
| 272 |
+
# Count the number of dots to see whether the enum is toplevel or nested
|
| 273 |
+
# in a message. We cannot use enum_desc.containing_type at this stage.
|
| 274 |
+
if enum_desc.file.package:
|
| 275 |
+
top_level = (enum_desc.full_name.count('.')
|
| 276 |
+
- enum_desc.file.package.count('.') == 1)
|
| 277 |
+
else:
|
| 278 |
+
top_level = enum_desc.full_name.count('.') == 0
|
| 279 |
+
if top_level:
|
| 280 |
+
file_name = enum_desc.file.name
|
| 281 |
+
package = enum_desc.file.package
|
| 282 |
+
for enum_value in enum_desc.values:
|
| 283 |
+
full_name = _NormalizeFullyQualifiedName(
|
| 284 |
+
'.'.join((package, enum_value.name)))
|
| 285 |
+
self._CheckConflictRegister(enum_value, full_name, file_name)
|
| 286 |
+
self._top_enum_values[full_name] = enum_value
|
| 287 |
+
self._AddFileDescriptor(enum_desc.file)
|
| 288 |
+
|
| 289 |
+
# Add ServiceDescriptor to descriptor pool is dreprecated. Please use Add()
|
| 290 |
+
# or AddSerializedFile() to add a FileDescriptorProto instead.
|
| 291 |
+
@_Deprecated
|
| 292 |
+
def AddServiceDescriptor(self, service_desc):
|
| 293 |
+
self._AddServiceDescriptor(service_desc)
|
| 294 |
+
|
| 295 |
+
# Never call this method. It is for internal usage only.
|
| 296 |
+
def _AddServiceDescriptor(self, service_desc):
|
| 297 |
+
"""Adds a ServiceDescriptor to the pool.
|
| 298 |
+
|
| 299 |
+
Args:
|
| 300 |
+
service_desc: A ServiceDescriptor.
|
| 301 |
+
"""
|
| 302 |
+
|
| 303 |
+
if not isinstance(service_desc, descriptor.ServiceDescriptor):
|
| 304 |
+
raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
|
| 305 |
+
|
| 306 |
+
self._CheckConflictRegister(service_desc, service_desc.full_name,
|
| 307 |
+
service_desc.file.name)
|
| 308 |
+
self._service_descriptors[service_desc.full_name] = service_desc
|
| 309 |
+
|
| 310 |
+
# Add ExtensionDescriptor to descriptor pool is dreprecated. Please use Add()
|
| 311 |
+
# or AddSerializedFile() to add a FileDescriptorProto instead.
|
| 312 |
+
@_Deprecated
|
| 313 |
+
def AddExtensionDescriptor(self, extension):
|
| 314 |
+
self._AddExtensionDescriptor(extension)
|
| 315 |
+
|
| 316 |
+
# Never call this method. It is for internal usage only.
|
| 317 |
+
def _AddExtensionDescriptor(self, extension):
|
| 318 |
+
"""Adds a FieldDescriptor describing an extension to the pool.
|
| 319 |
+
|
| 320 |
+
Args:
|
| 321 |
+
extension: A FieldDescriptor.
|
| 322 |
+
|
| 323 |
+
Raises:
|
| 324 |
+
AssertionError: when another extension with the same number extends the
|
| 325 |
+
same message.
|
| 326 |
+
TypeError: when the specified extension is not a
|
| 327 |
+
descriptor.FieldDescriptor.
|
| 328 |
+
"""
|
| 329 |
+
if not (isinstance(extension, descriptor.FieldDescriptor) and
|
| 330 |
+
extension.is_extension):
|
| 331 |
+
raise TypeError('Expected an extension descriptor.')
|
| 332 |
+
|
| 333 |
+
if extension.extension_scope is None:
|
| 334 |
+
self._toplevel_extensions[extension.full_name] = extension
|
| 335 |
+
|
| 336 |
+
try:
|
| 337 |
+
existing_desc = self._extensions_by_number[
|
| 338 |
+
extension.containing_type][extension.number]
|
| 339 |
+
except KeyError:
|
| 340 |
+
pass
|
| 341 |
+
else:
|
| 342 |
+
if extension is not existing_desc:
|
| 343 |
+
raise AssertionError(
|
| 344 |
+
'Extensions "%s" and "%s" both try to extend message type "%s" '
|
| 345 |
+
'with field number %d.' %
|
| 346 |
+
(extension.full_name, existing_desc.full_name,
|
| 347 |
+
extension.containing_type.full_name, extension.number))
|
| 348 |
+
|
| 349 |
+
self._extensions_by_number[extension.containing_type][
|
| 350 |
+
extension.number] = extension
|
| 351 |
+
self._extensions_by_name[extension.containing_type][
|
| 352 |
+
extension.full_name] = extension
|
| 353 |
+
|
| 354 |
+
# Also register MessageSet extensions with the type name.
|
| 355 |
+
if _IsMessageSetExtension(extension):
|
| 356 |
+
self._extensions_by_name[extension.containing_type][
|
| 357 |
+
extension.message_type.full_name] = extension
|
| 358 |
+
|
| 359 |
+
@_Deprecated
|
| 360 |
+
def AddFileDescriptor(self, file_desc):
|
| 361 |
+
self._InternalAddFileDescriptor(file_desc)
|
| 362 |
+
|
| 363 |
+
# Never call this method. It is for internal usage only.
|
| 364 |
+
def _InternalAddFileDescriptor(self, file_desc):
|
| 365 |
+
"""Adds a FileDescriptor to the pool, non-recursively.
|
| 366 |
+
|
| 367 |
+
If the FileDescriptor contains messages or enums, the caller must explicitly
|
| 368 |
+
register them.
|
| 369 |
+
|
| 370 |
+
Args:
|
| 371 |
+
file_desc: A FileDescriptor.
|
| 372 |
+
"""
|
| 373 |
+
|
| 374 |
+
self._AddFileDescriptor(file_desc)
|
| 375 |
+
# TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
|
| 376 |
+
# FieldDescriptor.file is added in code gen. Remove this solution after
|
| 377 |
+
# maybe 2020 for compatibility reason (with 3.4.1 only).
|
| 378 |
+
for extension in file_desc.extensions_by_name.values():
|
| 379 |
+
self._file_desc_by_toplevel_extension[
|
| 380 |
+
extension.full_name] = file_desc
|
| 381 |
+
|
| 382 |
+
def _AddFileDescriptor(self, file_desc):
|
| 383 |
+
"""Adds a FileDescriptor to the pool, non-recursively.
|
| 384 |
+
|
| 385 |
+
If the FileDescriptor contains messages or enums, the caller must explicitly
|
| 386 |
+
register them.
|
| 387 |
+
|
| 388 |
+
Args:
|
| 389 |
+
file_desc: A FileDescriptor.
|
| 390 |
+
"""
|
| 391 |
+
|
| 392 |
+
if not isinstance(file_desc, descriptor.FileDescriptor):
|
| 393 |
+
raise TypeError('Expected instance of descriptor.FileDescriptor.')
|
| 394 |
+
self._file_descriptors[file_desc.name] = file_desc
|
| 395 |
+
|
| 396 |
+
def FindFileByName(self, file_name):
|
| 397 |
+
"""Gets a FileDescriptor by file name.
|
| 398 |
+
|
| 399 |
+
Args:
|
| 400 |
+
file_name (str): The path to the file to get a descriptor for.
|
| 401 |
+
|
| 402 |
+
Returns:
|
| 403 |
+
FileDescriptor: The descriptor for the named file.
|
| 404 |
+
|
| 405 |
+
Raises:
|
| 406 |
+
KeyError: if the file cannot be found in the pool.
|
| 407 |
+
"""
|
| 408 |
+
|
| 409 |
+
try:
|
| 410 |
+
return self._file_descriptors[file_name]
|
| 411 |
+
except KeyError:
|
| 412 |
+
pass
|
| 413 |
+
|
| 414 |
+
try:
|
| 415 |
+
file_proto = self._internal_db.FindFileByName(file_name)
|
| 416 |
+
except KeyError as error:
|
| 417 |
+
if self._descriptor_db:
|
| 418 |
+
file_proto = self._descriptor_db.FindFileByName(file_name)
|
| 419 |
+
else:
|
| 420 |
+
raise error
|
| 421 |
+
if not file_proto:
|
| 422 |
+
raise KeyError('Cannot find a file named %s' % file_name)
|
| 423 |
+
return self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 424 |
+
|
| 425 |
+
def FindFileContainingSymbol(self, symbol):
|
| 426 |
+
"""Gets the FileDescriptor for the file containing the specified symbol.
|
| 427 |
+
|
| 428 |
+
Args:
|
| 429 |
+
symbol (str): The name of the symbol to search for.
|
| 430 |
+
|
| 431 |
+
Returns:
|
| 432 |
+
FileDescriptor: Descriptor for the file that contains the specified
|
| 433 |
+
symbol.
|
| 434 |
+
|
| 435 |
+
Raises:
|
| 436 |
+
KeyError: if the file cannot be found in the pool.
|
| 437 |
+
"""
|
| 438 |
+
|
| 439 |
+
symbol = _NormalizeFullyQualifiedName(symbol)
|
| 440 |
+
try:
|
| 441 |
+
return self._InternalFindFileContainingSymbol(symbol)
|
| 442 |
+
except KeyError:
|
| 443 |
+
pass
|
| 444 |
+
|
| 445 |
+
try:
|
| 446 |
+
# Try fallback database. Build and find again if possible.
|
| 447 |
+
self._FindFileContainingSymbolInDb(symbol)
|
| 448 |
+
return self._InternalFindFileContainingSymbol(symbol)
|
| 449 |
+
except KeyError:
|
| 450 |
+
raise KeyError('Cannot find a file containing %s' % symbol)
|
| 451 |
+
|
| 452 |
+
def _InternalFindFileContainingSymbol(self, symbol):
|
| 453 |
+
"""Gets the already built FileDescriptor containing the specified symbol.
|
| 454 |
+
|
| 455 |
+
Args:
|
| 456 |
+
symbol (str): The name of the symbol to search for.
|
| 457 |
+
|
| 458 |
+
Returns:
|
| 459 |
+
FileDescriptor: Descriptor for the file that contains the specified
|
| 460 |
+
symbol.
|
| 461 |
+
|
| 462 |
+
Raises:
|
| 463 |
+
KeyError: if the file cannot be found in the pool.
|
| 464 |
+
"""
|
| 465 |
+
try:
|
| 466 |
+
return self._descriptors[symbol].file
|
| 467 |
+
except KeyError:
|
| 468 |
+
pass
|
| 469 |
+
|
| 470 |
+
try:
|
| 471 |
+
return self._enum_descriptors[symbol].file
|
| 472 |
+
except KeyError:
|
| 473 |
+
pass
|
| 474 |
+
|
| 475 |
+
try:
|
| 476 |
+
return self._service_descriptors[symbol].file
|
| 477 |
+
except KeyError:
|
| 478 |
+
pass
|
| 479 |
+
|
| 480 |
+
try:
|
| 481 |
+
return self._top_enum_values[symbol].type.file
|
| 482 |
+
except KeyError:
|
| 483 |
+
pass
|
| 484 |
+
|
| 485 |
+
try:
|
| 486 |
+
return self._file_desc_by_toplevel_extension[symbol]
|
| 487 |
+
except KeyError:
|
| 488 |
+
pass
|
| 489 |
+
|
| 490 |
+
# Try fields, enum values and nested extensions inside a message.
|
| 491 |
+
top_name, _, sub_name = symbol.rpartition('.')
|
| 492 |
+
try:
|
| 493 |
+
message = self.FindMessageTypeByName(top_name)
|
| 494 |
+
assert (sub_name in message.extensions_by_name or
|
| 495 |
+
sub_name in message.fields_by_name or
|
| 496 |
+
sub_name in message.enum_values_by_name)
|
| 497 |
+
return message.file
|
| 498 |
+
except (KeyError, AssertionError):
|
| 499 |
+
raise KeyError('Cannot find a file containing %s' % symbol)
|
| 500 |
+
|
| 501 |
+
def FindMessageTypeByName(self, full_name):
|
| 502 |
+
"""Loads the named descriptor from the pool.
|
| 503 |
+
|
| 504 |
+
Args:
|
| 505 |
+
full_name (str): The full name of the descriptor to load.
|
| 506 |
+
|
| 507 |
+
Returns:
|
| 508 |
+
Descriptor: The descriptor for the named type.
|
| 509 |
+
|
| 510 |
+
Raises:
|
| 511 |
+
KeyError: if the message cannot be found in the pool.
|
| 512 |
+
"""
|
| 513 |
+
|
| 514 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 515 |
+
if full_name not in self._descriptors:
|
| 516 |
+
self._FindFileContainingSymbolInDb(full_name)
|
| 517 |
+
return self._descriptors[full_name]
|
| 518 |
+
|
| 519 |
+
def FindEnumTypeByName(self, full_name):
|
| 520 |
+
"""Loads the named enum descriptor from the pool.
|
| 521 |
+
|
| 522 |
+
Args:
|
| 523 |
+
full_name (str): The full name of the enum descriptor to load.
|
| 524 |
+
|
| 525 |
+
Returns:
|
| 526 |
+
EnumDescriptor: The enum descriptor for the named type.
|
| 527 |
+
|
| 528 |
+
Raises:
|
| 529 |
+
KeyError: if the enum cannot be found in the pool.
|
| 530 |
+
"""
|
| 531 |
+
|
| 532 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 533 |
+
if full_name not in self._enum_descriptors:
|
| 534 |
+
self._FindFileContainingSymbolInDb(full_name)
|
| 535 |
+
return self._enum_descriptors[full_name]
|
| 536 |
+
|
| 537 |
+
def FindFieldByName(self, full_name):
|
| 538 |
+
"""Loads the named field descriptor from the pool.
|
| 539 |
+
|
| 540 |
+
Args:
|
| 541 |
+
full_name (str): The full name of the field descriptor to load.
|
| 542 |
+
|
| 543 |
+
Returns:
|
| 544 |
+
FieldDescriptor: The field descriptor for the named field.
|
| 545 |
+
|
| 546 |
+
Raises:
|
| 547 |
+
KeyError: if the field cannot be found in the pool.
|
| 548 |
+
"""
|
| 549 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 550 |
+
message_name, _, field_name = full_name.rpartition('.')
|
| 551 |
+
message_descriptor = self.FindMessageTypeByName(message_name)
|
| 552 |
+
return message_descriptor.fields_by_name[field_name]
|
| 553 |
+
|
| 554 |
+
def FindOneofByName(self, full_name):
|
| 555 |
+
"""Loads the named oneof descriptor from the pool.
|
| 556 |
+
|
| 557 |
+
Args:
|
| 558 |
+
full_name (str): The full name of the oneof descriptor to load.
|
| 559 |
+
|
| 560 |
+
Returns:
|
| 561 |
+
OneofDescriptor: The oneof descriptor for the named oneof.
|
| 562 |
+
|
| 563 |
+
Raises:
|
| 564 |
+
KeyError: if the oneof cannot be found in the pool.
|
| 565 |
+
"""
|
| 566 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 567 |
+
message_name, _, oneof_name = full_name.rpartition('.')
|
| 568 |
+
message_descriptor = self.FindMessageTypeByName(message_name)
|
| 569 |
+
return message_descriptor.oneofs_by_name[oneof_name]
|
| 570 |
+
|
| 571 |
+
def FindExtensionByName(self, full_name):
|
| 572 |
+
"""Loads the named extension descriptor from the pool.
|
| 573 |
+
|
| 574 |
+
Args:
|
| 575 |
+
full_name (str): The full name of the extension descriptor to load.
|
| 576 |
+
|
| 577 |
+
Returns:
|
| 578 |
+
FieldDescriptor: The field descriptor for the named extension.
|
| 579 |
+
|
| 580 |
+
Raises:
|
| 581 |
+
KeyError: if the extension cannot be found in the pool.
|
| 582 |
+
"""
|
| 583 |
+
full_name = _NormalizeFullyQualifiedName(full_name)
|
| 584 |
+
try:
|
| 585 |
+
# The proto compiler does not give any link between the FileDescriptor
|
| 586 |
+
# and top-level extensions unless the FileDescriptorProto is added to
|
| 587 |
+
# the DescriptorDatabase, but this can impact memory usage.
|
| 588 |
+
# So we registered these extensions by name explicitly.
|
| 589 |
+
return self._toplevel_extensions[full_name]
|
| 590 |
+
except KeyError:
|
| 591 |
+
pass
|
| 592 |
+
message_name, _, extension_name = full_name.rpartition('.')
|
| 593 |
+
try:
|
| 594 |
+
# Most extensions are nested inside a message.
|
| 595 |
+
scope = self.FindMessageTypeByName(message_name)
|
| 596 |
+
except KeyError:
|
| 597 |
+
# Some extensions are defined at file scope.
|
| 598 |
+
scope = self._FindFileContainingSymbolInDb(full_name)
|
| 599 |
+
return scope.extensions_by_name[extension_name]
|
| 600 |
+
|
| 601 |
+
def FindExtensionByNumber(self, message_descriptor, number):
|
| 602 |
+
"""Gets the extension of the specified message with the specified number.
|
| 603 |
+
|
| 604 |
+
Extensions have to be registered to this pool by calling :func:`Add` or
|
| 605 |
+
:func:`AddExtensionDescriptor`.
|
| 606 |
+
|
| 607 |
+
Args:
|
| 608 |
+
message_descriptor (Descriptor): descriptor of the extended message.
|
| 609 |
+
number (int): Number of the extension field.
|
| 610 |
+
|
| 611 |
+
Returns:
|
| 612 |
+
FieldDescriptor: The descriptor for the extension.
|
| 613 |
+
|
| 614 |
+
Raises:
|
| 615 |
+
KeyError: when no extension with the given number is known for the
|
| 616 |
+
specified message.
|
| 617 |
+
"""
|
| 618 |
+
try:
|
| 619 |
+
return self._extensions_by_number[message_descriptor][number]
|
| 620 |
+
except KeyError:
|
| 621 |
+
self._TryLoadExtensionFromDB(message_descriptor, number)
|
| 622 |
+
return self._extensions_by_number[message_descriptor][number]
|
| 623 |
+
|
| 624 |
+
def FindAllExtensions(self, message_descriptor):
|
| 625 |
+
"""Gets all the known extensions of a given message.
|
| 626 |
+
|
| 627 |
+
Extensions have to be registered to this pool by build related
|
| 628 |
+
:func:`Add` or :func:`AddExtensionDescriptor`.
|
| 629 |
+
|
| 630 |
+
Args:
|
| 631 |
+
message_descriptor (Descriptor): Descriptor of the extended message.
|
| 632 |
+
|
| 633 |
+
Returns:
|
| 634 |
+
list[FieldDescriptor]: Field descriptors describing the extensions.
|
| 635 |
+
"""
|
| 636 |
+
# Fallback to descriptor db if FindAllExtensionNumbers is provided.
|
| 637 |
+
if self._descriptor_db and hasattr(
|
| 638 |
+
self._descriptor_db, 'FindAllExtensionNumbers'):
|
| 639 |
+
full_name = message_descriptor.full_name
|
| 640 |
+
all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
|
| 641 |
+
for number in all_numbers:
|
| 642 |
+
if number in self._extensions_by_number[message_descriptor]:
|
| 643 |
+
continue
|
| 644 |
+
self._TryLoadExtensionFromDB(message_descriptor, number)
|
| 645 |
+
|
| 646 |
+
return list(self._extensions_by_number[message_descriptor].values())
|
| 647 |
+
|
| 648 |
+
def _TryLoadExtensionFromDB(self, message_descriptor, number):
|
| 649 |
+
"""Try to Load extensions from descriptor db.
|
| 650 |
+
|
| 651 |
+
Args:
|
| 652 |
+
message_descriptor: descriptor of the extended message.
|
| 653 |
+
number: the extension number that needs to be loaded.
|
| 654 |
+
"""
|
| 655 |
+
if not self._descriptor_db:
|
| 656 |
+
return
|
| 657 |
+
# Only supported when FindFileContainingExtension is provided.
|
| 658 |
+
if not hasattr(
|
| 659 |
+
self._descriptor_db, 'FindFileContainingExtension'):
|
| 660 |
+
return
|
| 661 |
+
|
| 662 |
+
full_name = message_descriptor.full_name
|
| 663 |
+
file_proto = self._descriptor_db.FindFileContainingExtension(
|
| 664 |
+
full_name, number)
|
| 665 |
+
|
| 666 |
+
if file_proto is None:
|
| 667 |
+
return
|
| 668 |
+
|
| 669 |
+
try:
|
| 670 |
+
self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 671 |
+
except:
|
| 672 |
+
warn_msg = ('Unable to load proto file %s for extension number %d.' %
|
| 673 |
+
(file_proto.name, number))
|
| 674 |
+
warnings.warn(warn_msg, RuntimeWarning)
|
| 675 |
+
|
| 676 |
+
def FindServiceByName(self, full_name):
  """Loads the named service descriptor from the pool.

  Args:
    full_name (str): The full name of the service descriptor to load.

  Returns:
    ServiceDescriptor: The service descriptor for the named service.

  Raises:
    KeyError: if the service cannot be found in the pool.
  """
  normalized = _NormalizeFullyQualifiedName(full_name)
  if normalized not in self._service_descriptors:
    # Cache miss: pull in the file that defines this service from the
    # descriptor database (raises KeyError if it cannot be found).
    self._FindFileContainingSymbolInDb(normalized)
  return self._service_descriptors[normalized]
|
| 692 |
+
|
| 693 |
+
def FindMethodByName(self, full_name):
  """Loads the named service method descriptor from the pool.

  Args:
    full_name (str): The full name of the method descriptor to load.

  Returns:
    MethodDescriptor: The method descriptor for the service method.

  Raises:
    KeyError: if the method cannot be found in the pool.
  """
  normalized = _NormalizeFullyQualifiedName(full_name)
  # The method lives on its service; split off the final component and
  # resolve the service first.
  service_name, _, method_name = normalized.rpartition('.')
  service = self.FindServiceByName(service_name)
  return service.methods_by_name[method_name]
|
| 709 |
+
|
| 710 |
+
def _FindFileContainingSymbolInDb(self, symbol):
  """Finds the file in descriptor DB containing the specified symbol.

  Args:
    symbol (str): The name of the symbol to search for.

  Returns:
    FileDescriptor: The file that contains the specified symbol.

  Raises:
    KeyError: if the file cannot be found in the descriptor database.
  """
  try:
    file_proto = self._internal_db.FindFileContainingSymbol(symbol)
  except KeyError as error:
    # The internal DB missed; fall back to the external descriptor DB when
    # one was supplied, otherwise surface the original KeyError.
    if not self._descriptor_db:
      raise error
    file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
  if not file_proto:
    raise KeyError('Cannot find a file containing %s' % symbol)
  return self._ConvertFileProtoToFileDescriptor(file_proto)
|
| 732 |
+
|
| 733 |
+
def _ConvertFileProtoToFileDescriptor(self, file_proto):
  """Creates a FileDescriptor from a proto or returns a cached copy.

  This method also has the side effect of loading all the symbols found in
  the file into the appropriate dictionaries in the pool.

  Args:
    file_proto: The proto to convert.

  Returns:
    A FileDescriptor matching the passed in proto.
  """
  if file_proto.name not in self._file_descriptors:
    # Resolve transitive dependencies first so their types can populate the
    # lookup scope used while converting this file's own declarations.
    built_deps = list(self._GetDeps(file_proto.dependency))
    direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
    public_deps = [direct_deps[i] for i in file_proto.public_dependency]

    file_descriptor = descriptor.FileDescriptor(
        pool=self,
        name=file_proto.name,
        package=file_proto.package,
        syntax=file_proto.syntax,
        options=_OptionsOrNone(file_proto),
        serialized_pb=file_proto.SerializeToString(),
        dependencies=direct_deps,
        public_dependencies=public_deps,
        # pylint: disable=protected-access
        create_key=descriptor._internal_create_key)
    # Maps fully-qualified (dot-prefixed) names to message/enum descriptors.
    scope = {}

    # This loop extracts all the message and enum types from all the
    # dependencies of the file_proto. This is necessary to create the
    # scope of available message types when defining the passed in
    # file proto.
    for dependency in built_deps:
      scope.update(self._ExtractSymbols(
          dependency.message_types_by_name.values()))
      scope.update((_PrefixWithDot(enum.full_name), enum)
                   for enum in dependency.enum_types_by_name.values())

    for message_type in file_proto.message_type:
      message_desc = self._ConvertMessageDescriptor(
          message_type, file_proto.package, file_descriptor, scope,
          file_proto.syntax)
      file_descriptor.message_types_by_name[message_desc.name] = (
          message_desc)

    for enum_type in file_proto.enum_type:
      file_descriptor.enum_types_by_name[enum_type.name] = (
          self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                      file_descriptor, None, scope, True))

    # Top-level (file-scope) extension fields.
    for index, extension_proto in enumerate(file_proto.extension):
      extension_desc = self._MakeFieldDescriptor(
          extension_proto, file_proto.package, index, file_descriptor,
          is_extension=True)
      extension_desc.containing_type = self._GetTypeFromScope(
          file_descriptor.package, extension_proto.extendee, scope)
      self._SetFieldType(extension_proto, extension_desc,
                         file_descriptor.package, scope)
      file_descriptor.extensions_by_name[extension_desc.name] = (
          extension_desc)
      self._file_desc_by_toplevel_extension[extension_desc.full_name] = (
          file_descriptor)

    # Second pass: now that every type is in scope, resolve field types.
    for desc_proto in file_proto.message_type:
      self._SetAllFieldTypes(file_proto.package, desc_proto, scope)

    if file_proto.package:
      desc_proto_prefix = _PrefixWithDot(file_proto.package)
    else:
      desc_proto_prefix = ''

    for desc_proto in file_proto.message_type:
      desc = self._GetTypeFromScope(
          desc_proto_prefix, desc_proto.name, scope)
      file_descriptor.message_types_by_name[desc_proto.name] = desc

    for index, service_proto in enumerate(file_proto.service):
      file_descriptor.services_by_name[service_proto.name] = (
          self._MakeServiceDescriptor(service_proto, index, scope,
                                      file_proto.package, file_descriptor))

    self._file_descriptors[file_proto.name] = file_descriptor

  # Add extensions to the pool
  # (runs for cached files too, so re-registration must be idempotent —
  # NOTE(review): assumed; confirm against _AddExtensionDescriptor).
  file_desc = self._file_descriptors[file_proto.name]
  for extension in file_desc.extensions_by_name.values():
    self._AddExtensionDescriptor(extension)
  for message_type in file_desc.message_types_by_name.values():
    for extension in message_type.extensions:
      self._AddExtensionDescriptor(extension)

  return file_desc
|
| 827 |
+
|
| 828 |
+
def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                              scope=None, syntax=None):
  """Adds the proto to the pool in the specified package.

  Recursively converts nested messages and enums, wires up oneof
  membership, and registers the resulting descriptor in both `scope`
  and the pool's `_descriptors` map.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: The package the proto should be located in.
    file_desc: The file containing this message.
    scope: Dict mapping short and full symbols to message and enum types.
    syntax: string indicating syntax of the file ("proto2" or "proto3")

  Returns:
    The added descriptor.
  """

  if package:
    desc_name = '.'.join((package, desc_proto.name))
  else:
    desc_name = desc_proto.name

  if file_desc is None:
    file_name = None
  else:
    file_name = file_desc.name

  if scope is None:
    scope = {}

  # Nested types use this message's full name as their package prefix.
  nested = [
      self._ConvertMessageDescriptor(
          nested, desc_name, file_desc, scope, syntax)
      for nested in desc_proto.nested_type]
  enums = [
      self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
                                  scope, False)
      for enum in desc_proto.enum_type]
  fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
            for index, field in enumerate(desc_proto.field)]
  extensions = [
      self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
                                is_extension=True)
      for index, extension in enumerate(desc_proto.extension)]
  oneofs = [
      # pylint: disable=g-complex-comprehension
      descriptor.OneofDescriptor(
          desc.name,
          '.'.join((desc_name, desc.name)),
          index,
          None,
          [],
          _OptionsOrNone(desc),
          # pylint: disable=protected-access
          create_key=descriptor._internal_create_key)
      for index, desc in enumerate(desc_proto.oneof_decl)
  ]
  extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
  # A message is extendable iff it declares at least one extension range.
  if extension_ranges:
    is_extendable = True
  else:
    is_extendable = False
  desc = descriptor.Descriptor(
      name=desc_proto.name,
      full_name=desc_name,
      filename=file_name,
      containing_type=None,
      fields=fields,
      oneofs=oneofs,
      nested_types=nested,
      enum_types=enums,
      extensions=extensions,
      options=_OptionsOrNone(desc_proto),
      is_extendable=is_extendable,
      extension_ranges=extension_ranges,
      file=file_desc,
      serialized_start=None,
      serialized_end=None,
      syntax=syntax,
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
  # Back-link children to the descriptor just built.
  for nested in desc.nested_types:
    nested.containing_type = desc
  for enum in desc.enum_types:
    enum.containing_type = desc
  # Attach each field that declares a oneof_index to its oneof, in both
  # directions (oneof -> fields list, field -> containing_oneof).
  for field_index, field_desc in enumerate(desc_proto.field):
    if field_desc.HasField('oneof_index'):
      oneof_index = field_desc.oneof_index
      oneofs[oneof_index].fields.append(fields[field_index])
      fields[field_index].containing_oneof = oneofs[oneof_index]

  scope[_PrefixWithDot(desc_name)] = desc
  self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
  self._descriptors[desc_name] = desc
  return desc
|
| 921 |
+
|
| 922 |
+
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                           containing_type=None, scope=None, top_level=False):
  """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

  Args:
    enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
    package: Optional package name for the new message EnumDescriptor.
    file_desc: The file containing the enum descriptor.
    containing_type: The type containing this enum.
    scope: Scope containing available types.
    top_level: If True, the enum is a top level symbol. If False, the enum
        is defined inside a message.

  Returns:
    The added descriptor
  """

  if package:
    enum_name = '.'.join((package, enum_proto.name))
  else:
    enum_name = enum_proto.name

  if file_desc is None:
    file_name = None
  else:
    file_name = file_desc.name

  values = [self._MakeEnumValueDescriptor(value, index)
            for index, value in enumerate(enum_proto.value)]
  desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                   full_name=enum_name,
                                   filename=file_name,
                                   file=file_desc,
                                   values=values,
                                   containing_type=containing_type,
                                   options=_OptionsOrNone(enum_proto),
                                   # pylint: disable=protected-access
                                   create_key=descriptor._internal_create_key)
  scope['.%s' % enum_name] = desc
  self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
  self._enum_descriptors[enum_name] = desc

  # Add top level enum values.
  # (In proto, values of a top-level enum live directly in the enclosing
  # package namespace, hence the package-qualified registration here.)
  if top_level:
    for value in values:
      full_name = _NormalizeFullyQualifiedName(
          '.'.join((package, value.name)))
      self._CheckConflictRegister(value, full_name, file_name)
      self._top_enum_values[full_name] = value

  return desc
|
| 973 |
+
|
| 974 |
+
def _MakeFieldDescriptor(self, field_proto, message_name, index,
                         file_desc, is_extension=False):
  """Creates a field descriptor from a FieldDescriptorProto.

  For message and enum type fields, this method will do a look up
  in the pool for the appropriate descriptor for that type. If it
  is unavailable, it will fall back to the _source function to
  create it. If this type is still unavailable, construction will
  fail.

  Args:
    field_proto: The proto describing the field.
    message_name: The name of the containing message.
    index: Index of the field
    file_desc: The file containing the field descriptor.
    is_extension: Indication that this field is for an extension.

  Returns:
    An initialized FieldDescriptor object
  """
  full_name = ('.'.join((message_name, field_proto.name))
               if message_name else field_proto.name)
  # An empty json_name means "unset"; normalize it to None.
  json_name = field_proto.json_name or None

  # cpp_type / message_type / enum_type are filled in later by
  # _SetFieldType once the full type scope is available.
  return descriptor.FieldDescriptor(
      name=field_proto.name,
      full_name=full_name,
      index=index,
      number=field_proto.number,
      type=field_proto.type,
      cpp_type=None,
      message_type=None,
      enum_type=None,
      containing_type=None,
      label=field_proto.label,
      has_default_value=False,
      default_value=None,
      is_extension=is_extension,
      extension_scope=None,
      options=_OptionsOrNone(field_proto),
      json_name=json_name,
      file=file_desc,
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
|
| 1025 |
+
|
| 1026 |
+
def _SetAllFieldTypes(self, package, desc_proto, scope):
  """Sets all the descriptor's fields's types.

  This method also sets the containing types on any extensions.

  Args:
    package: The current package of desc_proto.
    desc_proto: The message descriptor to update.
    scope: Enclosing scope of available types.
  """
  package = _PrefixWithDot(package)

  message_desc = self._GetTypeFromScope(package, desc_proto.name, scope)

  # Declarations nested in this message resolve names relative to the
  # message's own fully-qualified name.
  if package == '.':
    child_package = _PrefixWithDot(desc_proto.name)
  else:
    child_package = '.'.join([package, desc_proto.name])

  for field_proto, field_desc in zip(desc_proto.field, message_desc.fields):
    self._SetFieldType(field_proto, field_desc, child_package, scope)

  for ext_proto, ext_desc in (
      zip(desc_proto.extension, message_desc.extensions)):
    ext_desc.containing_type = self._GetTypeFromScope(
        child_package, ext_proto.extendee, scope)
    self._SetFieldType(ext_proto, ext_desc, child_package, scope)

  # Recurse into nested message types.
  for nested_proto in desc_proto.nested_type:
    self._SetAllFieldTypes(child_package, nested_proto, scope)
|
| 1057 |
+
|
| 1058 |
+
def _SetFieldType(self, field_proto, field_desc, package, scope):
  """Sets the field's type, cpp_type, message_type and enum_type.

  NOTE: also mutates `field_proto` — when the proto has no explicit type,
  its `type` field is set here based on what kind of descriptor the
  type_name resolves to.

  Args:
    field_proto: Data about the field in proto format.
    field_desc: The descriptor to modify.
    package: The package the field's container is in.
    scope: Enclosing scope of available types.
  """
  if field_proto.type_name:
    desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
  else:
    desc = None

  # No explicit type: infer message vs. enum from the resolved descriptor.
  if not field_proto.HasField('type'):
    if isinstance(desc, descriptor.Descriptor):
      field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
    else:
      field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

  field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
      field_proto.type)

  if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
      or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
    field_desc.message_type = desc

  if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
    field_desc.enum_type = desc

  # Default-value handling: repeated fields default to an empty list; a
  # proto-supplied default is parsed from its string form per type;
  # otherwise the type's zero value is used.
  if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
    field_desc.has_default_value = False
    field_desc.default_value = []
  elif field_proto.HasField('default_value'):
    field_desc.has_default_value = True
    if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
        field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
      field_desc.default_value = float(field_proto.default_value)
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
      field_desc.default_value = field_proto.default_value
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
      field_desc.default_value = field_proto.default_value.lower() == 'true'
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      # Enum defaults are stored by value name; map to the numeric value.
      field_desc.default_value = field_desc.enum_type.values_by_name[
          field_proto.default_value].number
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
      # Bytes defaults arrive C-escaped; unescape to raw bytes.
      field_desc.default_value = text_encoding.CUnescape(
          field_proto.default_value)
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
      field_desc.default_value = None
    else:
      # All other types are of the "int" type.
      field_desc.default_value = int(field_proto.default_value)
  else:
    field_desc.has_default_value = False
    if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
        field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
      field_desc.default_value = 0.0
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
      field_desc.default_value = u''
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
      field_desc.default_value = False
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      # Without an explicit default, an enum defaults to its first value.
      field_desc.default_value = field_desc.enum_type.values[0].number
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
      field_desc.default_value = b''
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
      field_desc.default_value = None
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
      field_desc.default_value = None
    else:
      # All other types are of the "int" type.
      field_desc.default_value = 0

  field_desc.type = field_proto.type
|
| 1133 |
+
|
| 1134 |
+
def _MakeEnumValueDescriptor(self, value_proto, index):
  """Creates a enum value descriptor object from a enum value proto.

  Args:
    value_proto: The proto describing the enum value.
    index: The index of the enum value.

  Returns:
    An initialized EnumValueDescriptor object.
  """
  # `type` is left None here; the enclosing EnumDescriptor is attached
  # when the value list is handed to descriptor.EnumDescriptor.
  # pylint: disable=protected-access
  return descriptor.EnumValueDescriptor(
      name=value_proto.name,
      index=index,
      number=value_proto.number,
      options=_OptionsOrNone(value_proto),
      type=None,
      create_key=descriptor._internal_create_key)
|
| 1153 |
+
|
| 1154 |
+
def _MakeServiceDescriptor(self, service_proto, service_index, scope,
                           package, file_desc):
  """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.

  Args:
    service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
    service_index: The index of the service in the File.
    scope: Dict mapping short and full symbols to message and enum types.
    package: Optional package name for the new message EnumDescriptor.
    file_desc: The file containing the service descriptor.

  Returns:
    The added descriptor.
  """
  service_name = ('.'.join((package, service_proto.name))
                  if package else service_proto.name)

  # Build method descriptors first; their request/response types are
  # resolved against `scope`.
  method_descs = [
      self._MakeMethodDescriptor(method_proto, service_name, package,
                                 scope, method_index)
      for method_index, method_proto in enumerate(service_proto.method)]
  service_desc = descriptor.ServiceDescriptor(
      name=service_proto.name,
      full_name=service_name,
      index=service_index,
      methods=method_descs,
      options=_OptionsOrNone(service_proto),
      file=file_desc,
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
  self._CheckConflictRegister(service_desc, service_desc.full_name,
                              service_desc.file.name)
  self._service_descriptors[service_name] = service_desc
  return service_desc
|
| 1189 |
+
|
| 1190 |
+
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
                          index):
  """Creates a method descriptor from a MethodDescriptorProto.

  Args:
    method_proto: The proto describing the method.
    service_name: The name of the containing service.
    package: Optional package name to look up for types.
    scope: Scope containing available types.
    index: Index of the method in the service.

  Returns:
    An initialized MethodDescriptor object.
  """
  full_name = '.'.join((service_name, method_proto.name))
  # Request/response message types are resolved relative to `package`.
  request_type = self._GetTypeFromScope(
      package, method_proto.input_type, scope)
  response_type = self._GetTypeFromScope(
      package, method_proto.output_type, scope)
  return descriptor.MethodDescriptor(
      name=method_proto.name,
      full_name=full_name,
      index=index,
      containing_service=None,
      input_type=request_type,
      output_type=response_type,
      client_streaming=method_proto.client_streaming,
      server_streaming=method_proto.server_streaming,
      options=_OptionsOrNone(method_proto),
      # pylint: disable=protected-access
      create_key=descriptor._internal_create_key)
|
| 1221 |
+
|
| 1222 |
+
def _ExtractSymbols(self, descriptors):
  """Pulls out all the symbols from descriptor protos.

  Args:
    descriptors: The messages to extract descriptors from.
  Yields:
    A two element tuple of the type name and descriptor object.
  """
  for message_desc in descriptors:
    yield _PrefixWithDot(message_desc.full_name), message_desc
    # Recurse into nested messages before surfacing this level's enums.
    yield from self._ExtractSymbols(message_desc.nested_types)
    for enum_desc in message_desc.enum_types:
      yield _PrefixWithDot(enum_desc.full_name), enum_desc
|
| 1237 |
+
|
| 1238 |
+
def _GetDeps(self, dependencies, visited=None):
  """Recursively finds dependencies for file protos.

  Args:
    dependencies: The names of the files being depended on.
    visited: The names of files already found.

  Yields:
    Each direct and indirect dependency.
  """
  if not visited:
    visited = set()
  for dep_name in dependencies:
    if dep_name in visited:
      continue
    visited.add(dep_name)
    dep_desc = self.FindFileByName(dep_name)
    yield dep_desc
    # Only *public* dependencies are followed transitively.
    yield from self._GetDeps(
        [d.name for d in dep_desc.public_dependencies], visited)
|
| 1257 |
+
|
| 1258 |
+
def _GetTypeFromScope(self, package, type_name, scope):
  """Finds a given type name in the current scope.

  Args:
    package: The package the proto should be located in.
    type_name: The name of the type to be found in the scope.
    scope: Dict mapping short and full symbols to message and enum types.

  Returns:
    The descriptor for the requested type.
  """
  if type_name not in scope:
    # Qualify the name against the package, dropping one trailing
    # component per iteration, until a match appears in the scope.
    components = _PrefixWithDot(package).split('.')
    while components:
      candidate = '.'.join(components + [type_name])
      if candidate in scope:
        type_name = candidate
        break
      components.pop(-1)
  return scope[type_name]
|
| 1279 |
+
|
| 1280 |
+
|
| 1281 |
+
def _PrefixWithDot(name):
|
| 1282 |
+
return name if name.startswith('.') else '.%s' % name
|
| 1283 |
+
|
| 1284 |
+
|
| 1285 |
+
# Module-wide default pool. When the C++ descriptor implementation is in
# use, reuse the pool it maintains instead of a pure-Python one.
if _USE_C_DESCRIPTORS:
  # TODO(amauryfa): This pool could be constructed from Python code, when we
  # support a flag like 'use_cpp_generated_pool=True'.
  # pylint: disable=protected-access
  _DEFAULT = descriptor._message.default_pool
else:
  _DEFAULT = DescriptorPool()


def Default():
  """Returns the module-wide default DescriptorPool instance."""
  return _DEFAULT
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/duration_pb2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
# NOTE(review): machine-generated module vendored into this repo; regenerate
# with protoc instead of editing by hand.
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _DURATION._serialized_start=51
  _DURATION._serialized_end=93
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/empty_pb2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
# NOTE(review): machine-generated module vendored into this repo; regenerate
# with protoc instead of editing by hand.
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:

  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _EMPTY._serialized_start=48
  _EMPTY._serialized_end=55
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/field_mask_pb2.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
| 3 |
+
# source: google/protobuf/field_mask.proto
|
| 4 |
+
"""Generated protocol buffer code."""
|
| 5 |
+
from google.protobuf.internal import builder as _builder
|
| 6 |
+
from google.protobuf import descriptor as _descriptor
|
| 7 |
+
from google.protobuf import descriptor_pool as _descriptor_pool
|
| 8 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 9 |
+
# @@protoc_insertion_point(imports)
|
| 10 |
+
|
| 11 |
+
_sym_db = _symbol_database.Default()
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
| 17 |
+
|
| 18 |
+
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
|
| 19 |
+
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())
|
| 20 |
+
if _descriptor._USE_C_DESCRIPTORS == False:
|
| 21 |
+
|
| 22 |
+
DESCRIPTOR._options = None
|
| 23 |
+
DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
|
| 24 |
+
_FIELDMASK._serialized_start=53
|
| 25 |
+
_FIELDMASK._serialized_end=79
|
| 26 |
+
# @@protoc_insertion_point(module_scope)
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/__init__.py
ADDED
|
File without changes
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/api_implementation.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Determine which implementation of the protobuf API is used in this process.
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
import os
|
| 35 |
+
import sys
|
| 36 |
+
import warnings
|
| 37 |
+
|
| 38 |
+
try:
|
| 39 |
+
# pylint: disable=g-import-not-at-top
|
| 40 |
+
from google.protobuf.internal import _api_implementation
|
| 41 |
+
# The compile-time constants in the _api_implementation module can be used to
|
| 42 |
+
# switch to a certain implementation of the Python API at build time.
|
| 43 |
+
_api_version = _api_implementation.api_version
|
| 44 |
+
except ImportError:
|
| 45 |
+
_api_version = -1 # Unspecified by compiler flags.
|
| 46 |
+
|
| 47 |
+
if _api_version == 1:
|
| 48 |
+
raise ValueError('api_version=1 is no longer supported.')
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
_default_implementation_type = ('cpp' if _api_version > 0 else 'python')
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
# This environment variable can be used to switch to a certain implementation
|
| 55 |
+
# of the Python API, overriding the compile-time constants in the
|
| 56 |
+
# _api_implementation module. Right now only 'python' and 'cpp' are valid
|
| 57 |
+
# values. Any other value will be ignored.
|
| 58 |
+
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
|
| 59 |
+
_default_implementation_type)
|
| 60 |
+
|
| 61 |
+
if _implementation_type != 'python':
|
| 62 |
+
_implementation_type = 'cpp'
|
| 63 |
+
|
| 64 |
+
if 'PyPy' in sys.version and _implementation_type == 'cpp':
|
| 65 |
+
warnings.warn('PyPy does not work yet with cpp protocol buffers. '
|
| 66 |
+
'Falling back to the python implementation.')
|
| 67 |
+
_implementation_type = 'python'
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
# Detect if serialization should be deterministic by default
|
| 71 |
+
try:
|
| 72 |
+
# The presence of this module in a build allows the proto implementation to
|
| 73 |
+
# be upgraded merely via build deps.
|
| 74 |
+
#
|
| 75 |
+
# NOTE: Merely importing this automatically enables deterministic proto
|
| 76 |
+
# serialization for C++ code, but we still need to export it as a boolean so
|
| 77 |
+
# that we can do the same for `_implementation_type == 'python'`.
|
| 78 |
+
#
|
| 79 |
+
# NOTE2: It is possible for C++ code to enable deterministic serialization by
|
| 80 |
+
# default _without_ affecting Python code, if the C++ implementation is not in
|
| 81 |
+
# use by this module. That is intended behavior, so we don't actually expose
|
| 82 |
+
# this boolean outside of this module.
|
| 83 |
+
#
|
| 84 |
+
# pylint: disable=g-import-not-at-top,unused-import
|
| 85 |
+
from google.protobuf import enable_deterministic_proto_serialization
|
| 86 |
+
_python_deterministic_proto_serialization = True
|
| 87 |
+
except ImportError:
|
| 88 |
+
_python_deterministic_proto_serialization = False
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
  """Return the active API implementation type: 'python' or 'cpp'."""
  return _implementation_type
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _SetType(implementation_type):
  """Never use! Only for protobuf benchmark."""
  # Overwrites the module-level implementation choice; no validation is done
  # on the supplied value.
  global _implementation_type
  _implementation_type = implementation_type
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
# See comment on 'Type' above.
def Version():
  """Return the major version of the protobuf API (always 2)."""
  api_major_version = 2
  return api_major_version
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# For internal use only
def IsPythonDefaultSerializationDeterministic():
  """Return True if deterministic serialization is on for the python impl."""
  return _python_deterministic_proto_serialization
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/builder.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Builds descriptors, message classes and services for generated _pb2.py.
|
| 32 |
+
|
| 33 |
+
This file is only called in python generated _pb2.py files. It builds
|
| 34 |
+
descriptors, message classes and services that users can directly use
|
| 35 |
+
in generated code.
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
__author__ = 'jieluo@google.com (Jie Luo)'
|
| 39 |
+
|
| 40 |
+
from google.protobuf.internal import enum_type_wrapper
|
| 41 |
+
from google.protobuf import message as _message
|
| 42 |
+
from google.protobuf import reflection as _reflection
|
| 43 |
+
from google.protobuf import symbol_database as _symbol_database
|
| 44 |
+
|
| 45 |
+
_sym_db = _symbol_database.Default()
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def BuildMessageAndEnumDescriptors(file_des, module):
  """Builds message and enum descriptors.

  Args:
    file_des: FileDescriptor of the .proto file
    module: Generated _pb2 module
  """

  def _RegisterNested(msg_des, prefix):
    # Depth-first walk over nested message types: each one is exported into
    # the module dict under an upper-cased, underscore-joined path name.
    for name, nested_des in msg_des.nested_types_by_name.items():
      nested_key = prefix + name.upper()
      module[nested_key] = nested_des
      _RegisterNested(nested_des, nested_key + '_')
    # Enums declared inside this message use the same prefixing scheme.
    for enum_des in msg_des.enum_types:
      module[prefix + enum_des.name.upper()] = enum_des

  # Top-level messages get a single leading underscore before their name.
  for name, msg_des in file_des.message_types_by_name.items():
    top_key = '_' + name.upper()
    module[top_key] = msg_des
    _RegisterNested(msg_des, top_key + '_')
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def BuildTopDescriptorsAndMessages(file_des, module_name, module):
  """Builds top level descriptors and message classes.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """

  def _MakeMessageClass(msg_des):
    # Build classes for nested messages first so they become attributes of
    # the enclosing generated class.
    class_attrs = {}
    for nested_name, nested_des in msg_des.nested_types_by_name.items():
      class_attrs[nested_name] = _MakeMessageClass(nested_des)
    class_attrs['DESCRIPTOR'] = msg_des
    class_attrs['__module__'] = module_name
    generated = _reflection.GeneratedProtocolMessageType(
        msg_des.name, (_message.Message,), class_attrs)
    _sym_db.RegisterMessage(generated)
    return generated

  # top level enums
  for name, enum_des in file_des.enum_types_by_name.items():
    module['_' + name.upper()] = enum_des
    module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
    # Enum values are additionally exported as module-level constants.
    for enum_value in enum_des.values:
      module[enum_value.name] = enum_value.number

  # top level extensions
  for name, extension_des in file_des.extensions_by_name.items():
    module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
    module[name] = extension_des

  # services
  for name, service_des in file_des.services_by_name.items():
    module['_' + name.upper()] = service_des

  # Build messages.
  for name, msg_des in file_des.message_types_by_name.items():
    module[name] = _MakeMessageClass(msg_des)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def BuildServices(file_des, module_name, module):
  """Builds services classes and services stub class.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """
  # Imported lazily so that modules without services never pay for these.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import service as _service
  from google.protobuf import service_reflection
  # pylint: enable=g-import-not-at-top
  for name, service_des in file_des.services_by_name.items():
    # Concrete service class, then a client stub deriving from it.
    service_class = service_reflection.GeneratedServiceType(
        name, (_service.Service,),
        dict(DESCRIPTOR=service_des, __module__=module_name))
    module[name] = service_class
    stub_name = name + '_Stub'
    module[stub_name] = service_reflection.GeneratedServiceStubType(
        stub_name, (service_class,),
        dict(DESCRIPTOR=service_des, __module__=module_name))
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/containers.py
ADDED
|
@@ -0,0 +1,710 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Contains container classes to represent different protocol buffer types.
|
| 32 |
+
|
| 33 |
+
This file defines container classes which represent categories of protocol
|
| 34 |
+
buffer field types which need extra maintenance. Currently these categories
|
| 35 |
+
are:
|
| 36 |
+
|
| 37 |
+
- Repeated scalar fields - These are all repeated fields which aren't
|
| 38 |
+
composite (e.g. they are of simple types like int32, string, etc).
|
| 39 |
+
- Repeated composite fields - Repeated fields which are composite. This
|
| 40 |
+
includes groups and nested messages.
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
import collections.abc
|
| 44 |
+
import copy
|
| 45 |
+
import pickle
|
| 46 |
+
from typing import (
|
| 47 |
+
Any,
|
| 48 |
+
Iterable,
|
| 49 |
+
Iterator,
|
| 50 |
+
List,
|
| 51 |
+
MutableMapping,
|
| 52 |
+
MutableSequence,
|
| 53 |
+
NoReturn,
|
| 54 |
+
Optional,
|
| 55 |
+
Sequence,
|
| 56 |
+
TypeVar,
|
| 57 |
+
Union,
|
| 58 |
+
overload,
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
_T = TypeVar('_T')
|
| 63 |
+
_K = TypeVar('_K')
|
| 64 |
+
_V = TypeVar('_V')
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class BaseContainer(Sequence[_T]):
  """Base container class."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener: Any) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  @overload
  def __getitem__(self, key: int) -> _T:
    ...

  @overload
  def __getitem__(self, key: slice) -> List[_T]:
    ...

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self) -> int:
    """Returns the number of elements in the container."""
    return len(self._values)

  def __ne__(self, other: Any) -> bool:
    """Checks if another instance isn't equal to this one."""
    # The concrete classes should define __eq__.
    return not self == other

  # Containers are mutable, hence deliberately unhashable.
  __hash__ = None

  def __repr__(self) -> str:
    return repr(self._values)

  def sort(self, *args, **kwargs) -> None:
    # Continue to support the old sort_function keyword argument.
    # This is expected to be a rare occurrence, so use LBYL to avoid
    # the overhead of actually catching KeyError.
    # NOTE(review): list.sort() on Python 3 has no 'cmp' parameter, so a
    # caller passing sort_function would get a TypeError here — presumably
    # this path only existed for Python 2; confirm it is dead.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)

  def reverse(self) -> None:
    self._values.reverse()


# TODO(slebedev): Remove this. BaseContainer does *not* conform to
# MutableSequence, only its subclasses do.
collections.abc.MutableSequence.register(BaseContainer)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(
      self,
      message_listener: Any,
      type_checker: Any,
  ) -> None:
    """Args:

      message_listener: A MessageListener implementation. The
        RepeatedScalarFieldContainer will call this object's Modified() method
        when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super().__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value: _T) -> None:
    """Appends an item to the list. Similar to list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    # Only notify the listener on the first mutation since the last sync.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extends by appending the given iterable. Similar to list.extend()."""
    if elem_seq is None:
      return
    try:
      elem_seq_iter = iter(elem_seq)
    except TypeError:
      if not elem_seq:
        # silently ignore falsy inputs :-/.
        # TODO(ptucker): Deprecate this behavior. b/18413862
        return
      raise

    # Type-check every element before mutating, so a bad element leaves the
    # container unchanged.
    new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
    if new_values:
      self._values.extend(new_values)
    self._message_listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other)
    self._message_listener.Modified()

  def remove(self, elem: _T):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    # Delegate to __delitem__ so the listener is notified exactly once.
    self.__delitem__(key)
    return value

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value) -> None:
    """Sets the item on the specified position."""
    if isinstance(key, slice):
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      # list slice assignment accepts any iterable, including this lazy map.
      self._values[key] = map(self._type_checker.CheckValue, value)
      self._message_listener.Modified()
    else:
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Special case for the same type which should be common and fast.
    if isinstance(other, self.__class__):
      return other._values == self._values
    # We are presumably comparing against some other sequence type.
    return other == self._values

  def __deepcopy__(
      self,
      unused_memo: Any = None,
  ) -> 'RepeatedScalarFieldContainer[_T]':
    # The type checker is shared (stateless); only the listener is deep-copied.
    clone = RepeatedScalarFieldContainer(
        copy.deepcopy(self._message_listener), self._type_checker)
    clone.MergeFrom(self)
    return clone

  def __reduce__(self, **kwargs) -> NoReturn:
    raise pickle.PickleError(
        "Can't pickle repeated scalar fields, convert to list first")
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
# TODO(slebedev): Constrain T to be a subtype of Message.
|
| 248 |
+
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
|
| 249 |
+
"""Simple, list-like container for holding repeated composite fields."""
|
| 250 |
+
|
| 251 |
+
# Disallows assignment to other attributes.
|
| 252 |
+
__slots__ = ['_message_descriptor']
|
| 253 |
+
|
| 254 |
+
def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
|
| 255 |
+
"""
|
| 256 |
+
Note that we pass in a descriptor instead of the generated directly,
|
| 257 |
+
since at the time we construct a _RepeatedCompositeFieldContainer we
|
| 258 |
+
haven't yet necessarily initialized the type that will be contained in the
|
| 259 |
+
container.
|
| 260 |
+
|
| 261 |
+
Args:
|
| 262 |
+
message_listener: A MessageListener implementation.
|
| 263 |
+
The RepeatedCompositeFieldContainer will call this object's
|
| 264 |
+
Modified() method when it is modified.
|
| 265 |
+
message_descriptor: A Descriptor instance describing the protocol type
|
| 266 |
+
that should be present in this container. We'll use the
|
| 267 |
+
_concrete_class field of this descriptor when the client calls add().
|
| 268 |
+
"""
|
| 269 |
+
super().__init__(message_listener)
|
| 270 |
+
self._message_descriptor = message_descriptor
|
| 271 |
+
|
| 272 |
+
def add(self, **kwargs: Any) -> _T:
|
| 273 |
+
"""Adds a new element at the end of the list and returns it. Keyword
|
| 274 |
+
arguments may be used to initialize the element.
|
| 275 |
+
"""
|
| 276 |
+
new_element = self._message_descriptor._concrete_class(**kwargs)
|
| 277 |
+
new_element._SetListener(self._message_listener)
|
| 278 |
+
self._values.append(new_element)
|
| 279 |
+
if not self._message_listener.dirty:
|
| 280 |
+
self._message_listener.Modified()
|
| 281 |
+
return new_element
|
| 282 |
+
|
| 283 |
+
def append(self, value: _T) -> None:
|
| 284 |
+
"""Appends one element by copying the message."""
|
| 285 |
+
new_element = self._message_descriptor._concrete_class()
|
| 286 |
+
new_element._SetListener(self._message_listener)
|
| 287 |
+
new_element.CopyFrom(value)
|
| 288 |
+
self._values.append(new_element)
|
| 289 |
+
if not self._message_listener.dirty:
|
| 290 |
+
self._message_listener.Modified()
|
| 291 |
+
|
| 292 |
+
def insert(self, key: int, value: _T) -> None:
    """Insert a deep copy of *value* at position *key*."""
    listener = self._message_listener
    copy = self._message_descriptor._concrete_class()
    copy._SetListener(listener)
    copy.CopyFrom(value)
    self._values.insert(key, copy)
    if not listener.dirty:
        listener.Modified()
|
| 300 |
+
|
| 301 |
+
def extend(self, elem_seq: Iterable[_T]) -> None:
    """Extend the container by appending a copy of every message in
    *elem_seq* (which must hold messages of the same type as this one).
    """
    # Hoist attribute lookups out of the loop; large repeated fields make
    # this a hot path.
    factory = self._message_descriptor._concrete_class
    listener = self._message_listener
    storage = self._values
    for source in elem_seq:
        copy = factory()
        copy._SetListener(listener)
        copy.MergeFrom(source)
        storage.append(copy)
    listener.Modified()
|
| 315 |
+
|
| 316 |
+
def MergeFrom(
    self,
    other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
) -> None:
    """Append a copy of each message in *other* (a repeated field of the
    same type, or any iterable of such messages) to this container.
    """
    self.extend(other)
|
| 324 |
+
|
| 325 |
+
def remove(self, elem: _T) -> None:
    """Remove the first occurrence of *elem*; mirrors list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()
|
| 329 |
+
|
| 330 |
+
def pop(self, key: Optional[int] = -1) -> _T:
    """Remove and return the item at *key* (default: the last element);
    mirrors list.pop().
    """
    item = self._values[key]
    self.__delitem__(key)
    return item
|
| 335 |
+
|
| 336 |
+
@overload
|
| 337 |
+
def __setitem__(self, key: int, value: _T) -> None:
|
| 338 |
+
...
|
| 339 |
+
|
| 340 |
+
@overload
|
| 341 |
+
def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
|
| 342 |
+
...
|
| 343 |
+
|
| 344 |
+
def __setitem__(self, key, value):
|
| 345 |
+
# This method is implemented to make RepeatedCompositeFieldContainer
|
| 346 |
+
# structurally compatible with typing.MutableSequence. It is
|
| 347 |
+
# otherwise unsupported and will always raise an error.
|
| 348 |
+
raise TypeError(
|
| 349 |
+
f'{self.__class__.__name__} object does not support item assignment')
|
| 350 |
+
|
| 351 |
+
def __delitem__(self, key: Union[int, slice]) -> None:
|
| 352 |
+
"""Deletes the item at the specified position."""
|
| 353 |
+
del self._values[key]
|
| 354 |
+
self._message_listener.Modified()
|
| 355 |
+
|
| 356 |
+
def __eq__(self, other: Any) -> bool:
|
| 357 |
+
"""Compares the current instance with another one."""
|
| 358 |
+
if self is other:
|
| 359 |
+
return True
|
| 360 |
+
if not isinstance(other, self.__class__):
|
| 361 |
+
raise TypeError('Can only compare repeated composite fields against '
|
| 362 |
+
'other repeated composite fields.')
|
| 363 |
+
return self._values == other._values
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class ScalarMap(MutableMapping[_K, _V]):
    """Simple, type-checked, dict-like container for holding repeated scalars.

    Keys and values are validated by the supplied ValueChecker instances on
    every insertion, and the owning message is notified (via its
    MessageListener) whenever the map is mutated.
    """

    # Disallows assignment to other attributes.
    __slots__ = ['_key_checker', '_value_checker', '_values',
                 '_message_listener', '_entry_descriptor']

    def __init__(
        self,
        message_listener: Any,
        key_checker: Any,
        value_checker: Any,
        entry_descriptor: Any,
    ) -> None:
        """
        Args:
          message_listener: A MessageListener implementation.
            The ScalarMap will call this object's Modified() method when it
            is modified.
          key_checker: A type_checkers.ValueChecker instance to run on keys
            inserted into this container.
          value_checker: A type_checkers.ValueChecker instance to run on values
            inserted into this container.
          entry_descriptor: The MessageDescriptor of a map entry: key and value.
        """
        self._message_listener = message_listener
        self._key_checker = key_checker
        self._value_checker = value_checker
        self._entry_descriptor = entry_descriptor
        self._values = {}

    def __getitem__(self, key: _K) -> _V:
        try:
            return self._values[key]
        except KeyError:
            # defaultdict-like behavior: reading a missing key inserts the
            # value type's default under the (checked) key.
            key = self._key_checker.CheckValue(key)
            val = self._value_checker.DefaultValue()
            self._values[key] = val
            return val

    def __contains__(self, item: _K) -> bool:
        # We check the key's type to match the strong-typing flavor of the API.
        # Also this makes it easier to match the behavior of the C++
        # implementation.
        self._key_checker.CheckValue(item)
        return item in self._values

    @overload
    def get(self, key: _K) -> Optional[_V]:
        ...

    @overload
    def get(self, key: _K, default: _T) -> Union[_V, _T]:
        ...

    # We need to override this explicitly, because our defaultdict-like
    # behavior will make the default implementation (from our base class)
    # always insert the key.
    def get(self, key, default=None):
        if key in self:
            return self[key]
        else:
            return default

    def __setitem__(self, key: _K, value: _V) -> None:
        # NOTE: return annotation fixed from `_T` to `None`; the method
        # never returns a value.
        checked_key = self._key_checker.CheckValue(key)
        checked_value = self._value_checker.CheckValue(value)
        self._values[checked_key] = checked_value
        self._message_listener.Modified()

    def __delitem__(self, key: _K) -> None:
        del self._values[key]
        self._message_listener.Modified()

    def __len__(self) -> int:
        return len(self._values)

    def __iter__(self) -> Iterator[_K]:
        return iter(self._values)

    def __repr__(self) -> str:
        return repr(self._values)

    def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
        """Merge *other* into this map; duplicate keys take other's value."""
        self._values.update(other._values)
        self._message_listener.Modified()

    def InvalidateIterators(self) -> None:
        # It appears that the only way to reliably invalidate iterators to
        # self._values is to ensure that its size changes.
        original = self._values
        self._values = original.copy()
        original[None] = None

    # This is defined in the abstract base, but we can do it much more cheaply.
    def clear(self) -> None:
        self._values.clear()
        self._message_listener.Modified()

    def GetEntryClass(self) -> Any:
        return self._entry_descriptor._concrete_class
|
| 466 |
+
|
| 467 |
+
|
| 468 |
+
class MessageMap(MutableMapping[_K, _V]):
    """Simple, type-checked, dict-like container whose values are submessages.

    Missing keys are materialized on read (defaultdict-style) by
    instantiating the value message type; values may never be assigned
    directly, only mutated in place.
    """

    # Disallows assignment to other attributes.
    __slots__ = ['_key_checker', '_values', '_message_listener',
                 '_message_descriptor', '_entry_descriptor']

    def __init__(
        self,
        message_listener: Any,
        message_descriptor: Any,
        key_checker: Any,
        entry_descriptor: Any,
    ) -> None:
        """
        Args:
          message_listener: A MessageListener implementation; its Modified()
            method is called when the map is mutated.
          message_descriptor: Descriptor of the value message type; its
            _concrete_class attribute is instantiated for missing keys.
          key_checker: A type_checkers.ValueChecker instance to run on keys
            inserted into this container.
          entry_descriptor: The MessageDescriptor of a map entry: key and value.
        """
        self._message_listener = message_listener
        self._message_descriptor = message_descriptor
        self._key_checker = key_checker
        self._entry_descriptor = entry_descriptor
        self._values = {}

    def __getitem__(self, key: _K) -> _V:
        key = self._key_checker.CheckValue(key)
        try:
            return self._values[key]
        except KeyError:
            # Materialize a default submessage for the missing key.
            entry = self._message_descriptor._concrete_class()
            entry._SetListener(self._message_listener)
            self._values[key] = entry
            self._message_listener.Modified()
            return entry

    def get_or_create(self, key: _K) -> _V:
        """get_or_create() is an alias for getitem (ie. map[key]).

        Args:
          key: The key to get or create in the map.

        Useful when you want to make explicit that the access mutates the
        map; avoids lint errors for an otherwise pointless-looking
        statement such as:

          msg.my_map[key]
        """
        return self[key]

    @overload
    def get(self, key: _K) -> Optional[_V]:
        ...

    @overload
    def get(self, key: _K, default: _T) -> Union[_V, _T]:
        ...

    # Overridden explicitly: the base-class implementation would trigger our
    # defaultdict-like __getitem__ and always insert the key.
    def get(self, key, default=None):
        if key in self:
            return self[key]
        else:
            return default

    def __contains__(self, item: _K) -> bool:
        item = self._key_checker.CheckValue(item)
        return item in self._values

    def __setitem__(self, key: _K, value: _V) -> NoReturn:
        raise ValueError('May not set values directly, call my_map[key].foo = 5')

    def __delitem__(self, key: _K) -> None:
        key = self._key_checker.CheckValue(key)
        del self._values[key]
        self._message_listener.Modified()

    def __len__(self) -> int:
        return len(self._values)

    def __iter__(self) -> Iterator[_K]:
        return iter(self._values)

    def __repr__(self) -> str:
        return repr(self._values)

    def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
        """Merge *other* into this map, copying each submessage."""
        # pylint: disable=protected-access
        for key in other._values:
            # According to documentation: "When parsing from the wire or when
            # merging, if there are duplicate map keys the last key seen is
            # used".
            if key in self:
                del self[key]
            self[key].CopyFrom(other[key])
        # self._message_listener.Modified() not required here, because
        # mutations to submessages already propagate.

    def InvalidateIterators(self) -> None:
        # It appears that the only way to reliably invalidate iterators to
        # self._values is to ensure that its size changes.
        original = self._values
        self._values = original.copy()
        original[None] = None

    # This is defined in the abstract base, but we can do it much more cheaply.
    def clear(self) -> None:
        self._values.clear()
        self._message_listener.Modified()

    def GetEntryClass(self) -> Any:
        return self._entry_descriptor._concrete_class
|
| 587 |
+
|
| 588 |
+
|
| 589 |
+
class _UnknownField:
|
| 590 |
+
"""A parsed unknown field."""
|
| 591 |
+
|
| 592 |
+
# Disallows assignment to other attributes.
|
| 593 |
+
__slots__ = ['_field_number', '_wire_type', '_data']
|
| 594 |
+
|
| 595 |
+
def __init__(self, field_number, wire_type, data):
|
| 596 |
+
self._field_number = field_number
|
| 597 |
+
self._wire_type = wire_type
|
| 598 |
+
self._data = data
|
| 599 |
+
return
|
| 600 |
+
|
| 601 |
+
def __lt__(self, other):
|
| 602 |
+
# pylint: disable=protected-access
|
| 603 |
+
return self._field_number < other._field_number
|
| 604 |
+
|
| 605 |
+
def __eq__(self, other):
|
| 606 |
+
if self is other:
|
| 607 |
+
return True
|
| 608 |
+
# pylint: disable=protected-access
|
| 609 |
+
return (self._field_number == other._field_number and
|
| 610 |
+
self._wire_type == other._wire_type and
|
| 611 |
+
self._data == other._data)
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
class UnknownFieldRef:
    """A lazy reference to one entry of an UnknownFieldSet.

    The underlying field is looked up on every property access so that a
    cleared parent set is detected and reported instead of returning stale
    data.
    """

    def __init__(self, parent, index):
        self._parent = parent
        self._index = index

    def _check_valid(self):
        # Both a missing/empty parent and an out-of-range index mean the
        # field is gone (e.g. the owning message was cleared).
        if not self._parent or self._index >= len(self._parent):
            raise ValueError('UnknownField does not exist. '
                             'The parent message might be cleared.')

    @property
    def field_number(self):
        self._check_valid()
        # pylint: disable=protected-access
        return self._parent._internal_get(self._index)._field_number

    @property
    def wire_type(self):
        self._check_valid()
        # pylint: disable=protected-access
        return self._parent._internal_get(self._index)._wire_type

    @property
    def data(self):
        self._check_valid()
        # pylint: disable=protected-access
        return self._parent._internal_get(self._index)._data
|
| 645 |
+
|
| 646 |
+
|
| 647 |
+
class UnknownFieldSet:
    """Container for the unknown fields parsed off a message.

    After _clear() the set is dead: _values becomes None and any further
    access raises ValueError.
    """

    # Disallows assignment to other attributes.
    __slots__ = ['_values']

    def __init__(self):
        self._values = []

    def __getitem__(self, index):
        """Return an UnknownFieldRef for the field at *index* (negatives OK)."""
        if self._values is None:
            raise ValueError('UnknownFields does not exist. '
                             'The parent message might be cleared.')
        size = len(self._values)
        if index < 0:
            index += size
        if index < 0 or index >= size:
            # BUG FIX: was `'index %d out of range'.index`, which passed the
            # str.index bound method to IndexError instead of a formatted
            # message.
            raise IndexError('index %d out of range' % index)

        return UnknownFieldRef(self, index)

    def _internal_get(self, index):
        # Raw access used by UnknownFieldRef; no validity checks.
        return self._values[index]

    def __len__(self):
        if self._values is None:
            raise ValueError('UnknownFields does not exist. '
                             'The parent message might be cleared.')
        return len(self._values)

    def _add(self, field_number, wire_type, data):
        """Append a new _UnknownField and return it."""
        unknown_field = _UnknownField(field_number, wire_type, data)
        self._values.append(unknown_field)
        return unknown_field

    def __iter__(self):
        for i in range(len(self)):
            yield UnknownFieldRef(self, i)

    def _extend(self, other):
        """Append all fields from *other* (another UnknownFieldSet or None)."""
        if other is None:
            return
        # pylint: disable=protected-access
        self._values.extend(other._values)

    def __eq__(self, other):
        if self is other:
            return True
        # Sort unknown fields because their order shouldn't
        # affect equality test.
        values = list(self._values)
        if other is None:
            return not values
        values.sort()
        # pylint: disable=protected-access
        other_values = sorted(other._values)
        return values == other_values

    def _clear(self):
        """Recursively clear nested sets, then mark this set as dead."""
        for value in self._values:
            # pylint: disable=protected-access
            if isinstance(value._data, UnknownFieldSet):
                value._data._clear()  # pylint: disable=protected-access
        self._values = None
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/decoder.py
ADDED
|
@@ -0,0 +1,1029 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Code for decoding protocol buffer primitives.
|
| 32 |
+
|
| 33 |
+
This code is very similar to encoder.py -- read the docs for that module first.
|
| 34 |
+
|
| 35 |
+
A "decoder" is a function with the signature:
|
| 36 |
+
Decode(buffer, pos, end, message, field_dict)
|
| 37 |
+
The arguments are:
|
| 38 |
+
buffer: The string containing the encoded message.
|
| 39 |
+
pos: The current position in the string.
|
| 40 |
+
end: The position in the string where the current message ends. May be
|
| 41 |
+
less than len(buffer) if we're reading a sub-message.
|
| 42 |
+
message: The message object into which we're parsing.
|
| 43 |
+
field_dict: message._fields (avoids a hashtable lookup).
|
| 44 |
+
The decoder reads the field and stores it into field_dict, returning the new
|
| 45 |
+
buffer position. A decoder for a repeated field may proactively decode all of
|
| 46 |
+
the elements of that field, if they appear consecutively.
|
| 47 |
+
|
| 48 |
+
Note that decoders may throw any of the following:
|
| 49 |
+
IndexError: Indicates a truncated message.
|
| 50 |
+
struct.error: Unpacking of a fixed-width field failed.
|
| 51 |
+
message.DecodeError: Other errors.
|
| 52 |
+
|
| 53 |
+
Decoders are expected to raise an exception if they are called with pos > end.
|
| 54 |
+
This allows callers to be lax about bounds checking: it's fine to read past
|
| 55 |
+
"end" as long as you are sure that someone else will notice and throw an
|
| 56 |
+
exception later on.
|
| 57 |
+
|
| 58 |
+
Something up the call stack is expected to catch IndexError and struct.error
|
| 59 |
+
and convert them to message.DecodeError.
|
| 60 |
+
|
| 61 |
+
Decoders are constructed using decoder constructors with the signature:
|
| 62 |
+
MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
|
| 63 |
+
The arguments are:
|
| 64 |
+
field_number: The field number of the field we want to decode.
|
| 65 |
+
is_repeated: Is the field a repeated field? (bool)
|
| 66 |
+
is_packed: Is the field a packed field? (bool)
|
| 67 |
+
key: The key to use when looking up the field within field_dict.
|
| 68 |
+
(This is actually the FieldDescriptor but nothing in this
|
| 69 |
+
file should depend on that.)
|
| 70 |
+
new_default: A function which takes a message object as a parameter and
|
| 71 |
+
returns a new instance of the default value for this field.
|
| 72 |
+
(This is called for repeated fields and sub-messages, when an
|
| 73 |
+
instance does not already exist.)
|
| 74 |
+
|
| 75 |
+
As with encoders, we define a decoder constructor for every type of field.
|
| 76 |
+
Then, for every field of every message class we construct an actual decoder.
|
| 77 |
+
That decoder goes into a dict indexed by tag, so when we decode a message
|
| 78 |
+
we repeatedly read a tag, look up the corresponding decoder, and invoke it.
|
| 79 |
+
"""
|
| 80 |
+
|
| 81 |
+
__author__ = 'kenton@google.com (Kenton Varda)'
|
| 82 |
+
|
| 83 |
+
import math
|
| 84 |
+
import struct
|
| 85 |
+
|
| 86 |
+
from google.protobuf.internal import containers
|
| 87 |
+
from google.protobuf.internal import encoder
|
| 88 |
+
from google.protobuf.internal import wire_format
|
| 89 |
+
from google.protobuf import message
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
# This is not for optimization, but rather to avoid conflicts with local
|
| 93 |
+
# variables named "message".
|
| 94 |
+
_DecodeError = message.DecodeError
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def _VarintDecoder(mask, result_type):
|
| 98 |
+
"""Return an encoder for a basic varint value (does not include tag).
|
| 99 |
+
|
| 100 |
+
Decoded values will be bitwise-anded with the given mask before being
|
| 101 |
+
returned, e.g. to limit them to 32 bits. The returned decoder does not
|
| 102 |
+
take the usual "end" parameter -- the caller is expected to do bounds checking
|
| 103 |
+
after the fact (often the caller can defer such checking until later). The
|
| 104 |
+
decoder returns a (value, new_pos) pair.
|
| 105 |
+
"""
|
| 106 |
+
|
| 107 |
+
def DecodeVarint(buffer, pos):
|
| 108 |
+
result = 0
|
| 109 |
+
shift = 0
|
| 110 |
+
while 1:
|
| 111 |
+
b = buffer[pos]
|
| 112 |
+
result |= ((b & 0x7f) << shift)
|
| 113 |
+
pos += 1
|
| 114 |
+
if not (b & 0x80):
|
| 115 |
+
result &= mask
|
| 116 |
+
result = result_type(result)
|
| 117 |
+
return (result, pos)
|
| 118 |
+
shift += 7
|
| 119 |
+
if shift >= 64:
|
| 120 |
+
raise _DecodeError('Too many bytes when decoding varint.')
|
| 121 |
+
return DecodeVarint
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def _SignedVarintDecoder(bits, result_type):
|
| 125 |
+
"""Like _VarintDecoder() but decodes signed values."""
|
| 126 |
+
|
| 127 |
+
signbit = 1 << (bits - 1)
|
| 128 |
+
mask = (1 << bits) - 1
|
| 129 |
+
|
| 130 |
+
def DecodeVarint(buffer, pos):
|
| 131 |
+
result = 0
|
| 132 |
+
shift = 0
|
| 133 |
+
while 1:
|
| 134 |
+
b = buffer[pos]
|
| 135 |
+
result |= ((b & 0x7f) << shift)
|
| 136 |
+
pos += 1
|
| 137 |
+
if not (b & 0x80):
|
| 138 |
+
result &= mask
|
| 139 |
+
result = (result ^ signbit) - signbit
|
| 140 |
+
result = result_type(result)
|
| 141 |
+
return (result, pos)
|
| 142 |
+
shift += 7
|
| 143 |
+
if shift >= 64:
|
| 144 |
+
raise _DecodeError('Too many bytes when decoding varint.')
|
| 145 |
+
return DecodeVarint
|
| 146 |
+
|
| 147 |
+
# All 32-bit and 64-bit values are represented as int.
_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
_DecodeSignedVarint = _SignedVarintDecoder(64, int)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def ReadTag(buffer, pos):
    """Read a tag from *buffer* and return a (tag_bytes, new_pos) tuple.

    The tag's raw bytes are returned rather than the decoded number: the raw
    bytes can be used directly to look up the proper decoder, which
    effectively trades pure-Python work (decoding a varint) for work done in
    C (a byte-string hash-table lookup). In a low-level language decoding
    the varint would be cheaper, but not in Python.

    Args:
      buffer: memoryview object of the encoded bytes
      pos: int of the current position to start from

    Returns:
      Tuple[bytes, int] of the tag data and new position.
    """
    tag_start = pos
    while buffer[pos] & 0x80:  # skip continuation bytes of the varint
        pos += 1
    pos += 1  # include the final (non-continuation) byte

    return buffer[tag_start:pos].tobytes(), pos
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
# --------------------------------------------------------------------
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type: The field's wire type.
      decode_value: A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
                      clear_if_default=False):
    """Build the concrete decoder for one field of this wire type."""
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        """Decode a length-prefixed run of packed values into field_dict."""
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # The packed payload is prefixed by its byte length.
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]  # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        """Decode consecutive occurrences of an unpacked repeated field."""
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        """Decode one scalar value into field_dict (proto3 clears defaults)."""
        (new_value, pos) = decode_value(buffer, pos)
        if pos > end:
          raise _DecodeError('Truncated message.')
        if clear_if_default and not new_value:
          field_dict.pop(key, None)
        else:
          field_dict[key] = new_value
        return pos
      return DecodeField

  return SpecificDecoder
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like SimpleDecoder but additionally invokes modify_value on every value
  before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  def DecodeModifiedValue(buffer, pos):
    """Decode one raw value, then post-process it with modify_value."""
    (raw, new_pos) = decode_value(buffer, pos)
    return (modify_value(raw), new_pos)

  return _SimpleDecoder(wire_type, DecodeModifiedValue)
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
    wire_type: The field's wire type.
    format: The format string to pass to struct.unpack().
  """

  width = struct.calcsize(format)
  unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  # struct.error is deliberately allowed to escape: a caller up the stack
  # converts it to _DecodeError, so no per-value try/except is needed here.
  def DecodeFixedWidth(buffer, pos):
    """Unpack one fixed-width value starting at pos."""
    stop = pos + width
    return (unpack(format, buffer[pos:stop])[0], stop)

  return _SimpleDecoder(wire_type, DecodeFixedWidth)
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized float to a float and new position.

    Args:
      buffer: memoryview of the serialized bytes
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the deserialized float value and new position
      in the serialized data.
    """
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (`in b'\x7F\xFF'` is a single-byte substring test on the high byte.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (math.nan, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (-math.inf, new_pos)
      return (math.inf, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized double to a double and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the decoded double value and new position
      in the serialized data.
    """
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the
    # significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    # (`in b'\x7F\xFF'` is a single-byte substring test on the high byte.)
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (math.nan, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
                clear_if_default=False):
  """Returns a decoder for enum field."""
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      """Decode serialized packed enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        # Unrecognized enum numbers are preserved as unknown fields rather
        # than appended to the value list.
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)

          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
      if pos > endpoint:
        # Roll back whichever store (value list or unknown-field set)
        # received the truncated last element.
        if element in enum_type.values_by_number:
          del value[-1]  # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
          # pylint: disable=protected-access
          del message._unknown_field_set._values[-1]
          # pylint: enable=protected-access
        raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos].tobytes()))
          if message._unknown_field_set is None:
            message._unknown_field_set = containers.UnknownFieldSet()
          message._unknown_field_set._add(
              field_number, wire_format.WIRETYPE_VARINT, element)
        # pylint: enable=protected-access
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if clear_if_default and not enum_value:
        field_dict.pop(key, None)
        return pos
      # pylint: disable=protected-access
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        if message._unknown_field_set is None:
          message._unknown_field_set = containers.UnknownFieldSet()
        message._unknown_field_set._add(
            field_number, wire_format.WIRETYPE_VARINT, enum_value)
      # pylint: enable=protected-access
      return pos
    return DecodeField
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
# --------------------------------------------------------------------
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
# Concrete decoder constructors for each scalar protobuf field type.
Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 are ZigZag-encoded, so each decoded varint is post-processed.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
|
| 541 |
+
|
| 542 |
+
|
| 543 |
+
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
                  clear_if_default=False):
  """Returns a decoder for a string field."""

  local_DecodeVarint = _DecodeVarint

  def _ConvertToUnicode(memview):
    """Convert byte to unicode."""
    byte_str = memview.tobytes()
    try:
      value = str(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

    return value

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Append consecutive occurrences of this string field to field_dict."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode one string value; honors clear_if_default (proto3)."""
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
                 clear_if_default=False):
  """Returns a decoder for a bytes field."""

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Append consecutive occurrences of this bytes field to field_dict."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos].tobytes())
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode one bytes value; honors clear_if_default (proto3)."""
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = buffer[pos:new_pos].tobytes()
      return new_pos
    return DecodeField
|
| 635 |
+
|
| 636 |
+
|
| 637 |
+
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field."""

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode consecutive group occurrences, each closed by an end tag."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): value is re-fetched on every iteration even though it
        # was fetched just above; this appears redundant — confirm upstream
        # intent before simplifying.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode one group value terminated by its end-group tag."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
|
| 682 |
+
|
| 683 |
+
|
| 684 |
+
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field."""

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Parse consecutive length-delimited submessages into field_dict."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Parse one length-delimited submessage, merging with any existing."""
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
|
| 732 |
+
|
| 733 |
+
|
| 734 |
+
# --------------------------------------------------------------------
|
| 735 |
+
|
| 736 |
+
# Start-group tag of the `Item` group (field number 1) inside a MessageSet.
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
|
| 737 |
+
|
| 738 |
+
def MessageSetItemDecoder(descriptor):
  """Returns a decoder for a MessageSet item.

  The parameter is the message Descriptor.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decode serialized message set to its value and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.
      end: int, end position of serialized data
      message: Message object to store unknown fields in
      field_dict: Map[Descriptor, Any] to store decoded values in.

    Returns:
      int, new position in serialized data.
    """
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Fix: call the hoisted local alias; previously the global SkipField
        # was used here, leaving the local_SkipField binding dead.
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = message.Extensions._FindExtensionByNumber(type_id)
    # pylint: disable=protected-access
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        message_type = extension.message_type
        if not hasattr(message_type, '_concrete_class'):
          # pylint: disable=protected-access
          message._FACTORY.GetPrototype(message_type)
        value = field_dict.setdefault(
            extension, message_type._concrete_class())
      if value._InternalParse(buffer, message_start, message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unrecognized type_id: preserve the whole raw item as unknown data.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append(
          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
      if message._unknown_field_set is None:
        message._unknown_field_set = containers.UnknownFieldSet()
      message._unknown_field_set._add(
          type_id,
          wire_format.WIRETYPE_LENGTH_DELIMITED,
          buffer[message_start:message_end].tobytes())
    # pylint: enable=protected-access

    return pos

  return DecodeItem
|
| 833 |
+
|
| 834 |
+
# --------------------------------------------------------------------
|
| 835 |
+
|
| 836 |
+
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field."""

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict):
    """Decode consecutive map entries (length-delimited key/value pairs)."""
    # One scratch submessage is reused (and Clear()ed) for every entry.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        value[submsg.key].CopyFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed.  Return.
        return new_pos

  return DecodeMap
|
| 877 |
+
|
| 878 |
+
# --------------------------------------------------------------------
|
| 879 |
+
# Optimization is not as heavy here because calls to SkipField() are rare,
|
| 880 |
+
# except for handling end-group tags.
|
| 881 |
+
|
| 882 |
+
def _SkipVarint(buffer, pos, end):
|
| 883 |
+
"""Skip a varint value. Returns the new position."""
|
| 884 |
+
# Previously ord(buffer[pos]) raised IndexError when pos is out of range.
|
| 885 |
+
# With this code, ord(b'') raises TypeError. Both are handled in
|
| 886 |
+
# python_message.py to generate a 'Truncated message' error.
|
| 887 |
+
while ord(buffer[pos:pos+1].tobytes()) & 0x80:
|
| 888 |
+
pos += 1
|
| 889 |
+
pos += 1
|
| 890 |
+
if pos > end:
|
| 891 |
+
raise _DecodeError('Truncated message.')
|
| 892 |
+
return pos
|
| 893 |
+
|
| 894 |
+
def _SkipFixed64(buffer, pos, end):
|
| 895 |
+
"""Skip a fixed64 value. Returns the new position."""
|
| 896 |
+
|
| 897 |
+
pos += 8
|
| 898 |
+
if pos > end:
|
| 899 |
+
raise _DecodeError('Truncated message.')
|
| 900 |
+
return pos
|
| 901 |
+
|
| 902 |
+
|
| 903 |
+
def _DecodeFixed64(buffer, pos):
|
| 904 |
+
"""Decode a fixed64."""
|
| 905 |
+
new_pos = pos + 8
|
| 906 |
+
return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
|
| 907 |
+
|
| 908 |
+
|
| 909 |
+
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value. Returns the new position."""
  (size, new_pos) = _DecodeVarint(buffer, pos)
  new_pos += size
  if new_pos > end:
    raise _DecodeError('Truncated message.')
  return new_pos
|
| 917 |
+
|
| 918 |
+
|
| 919 |
+
def _SkipGroup(buffer, pos, end):
  """Skip a nested group; return the position just past its END_GROUP tag."""
  while True:
    tag_bytes, pos = ReadTag(buffer, pos)
    skipped_to = SkipField(buffer, pos, end, tag_bytes)
    if skipped_to == -1:
      # SkipField signals an END_GROUP tag with -1; `pos` already points
      # just past that tag, which is where the enclosing message resumes.
      return pos
    pos = skipped_to
|
| 928 |
+
|
| 929 |
+
|
| 930 |
+
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
  """Decode an UnknownFieldSet.

  Args:
    buffer: the serialized data.
    pos: offset to start reading from.
    end_pos: exclusive end offset, or None to read until an END_GROUP tag.

  Returns:
    A (UnknownFieldSet, new_position) tuple.
  """
  result = containers.UnknownFieldSet()
  while end_pos is None or pos < end_pos:
    tag_bytes, pos = ReadTag(buffer, pos)
    tag, _ = _DecodeVarint(tag_bytes, 0)
    field_number, wire_type = wire_format.UnpackTag(tag)
    if wire_type == wire_format.WIRETYPE_END_GROUP:
      break
    data, pos = _DecodeUnknownField(buffer, pos, wire_type)
    # pylint: disable=protected-access
    result._add(field_number, wire_type, data)

  return (result, pos)
|
| 945 |
+
|
| 946 |
+
|
| 947 |
+
def _DecodeUnknownField(buffer, pos, wire_type):
  """Decode a single unknown field's payload.

  Returns:
    A (data, new_position) tuple; for an END_GROUP tag returns (0, -1) so
    the caller knows to stop.

  Raises:
    _DecodeError: if `wire_type` is not a recognized wire type.
  """
  if wire_type == wire_format.WIRETYPE_VARINT:
    return _DecodeVarint(buffer, pos)
  if wire_type == wire_format.WIRETYPE_FIXED64:
    return _DecodeFixed64(buffer, pos)
  if wire_type == wire_format.WIRETYPE_FIXED32:
    return _DecodeFixed32(buffer, pos)
  if wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    size, pos = _DecodeVarint(buffer, pos)
    # Copy out of the memoryview so the data outlives the buffer.
    return (buffer[pos:pos + size].tobytes(), pos + size)
  if wire_type == wire_format.WIRETYPE_START_GROUP:
    return _DecodeUnknownFieldSet(buffer, pos)
  if wire_type == wire_format.WIRETYPE_END_GROUP:
    return (0, -1)
  raise _DecodeError('Wrong wire type in tag.')
|
| 968 |
+
|
| 969 |
+
|
| 970 |
+
def _EndGroup(buffer, pos, end):
|
| 971 |
+
"""Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
|
| 972 |
+
|
| 973 |
+
return -1
|
| 974 |
+
|
| 975 |
+
|
| 976 |
+
def _SkipFixed32(buffer, pos, end):
|
| 977 |
+
"""Skip a fixed32 value. Returns the new position."""
|
| 978 |
+
|
| 979 |
+
pos += 4
|
| 980 |
+
if pos > end:
|
| 981 |
+
raise _DecodeError('Truncated message.')
|
| 982 |
+
return pos
|
| 983 |
+
|
| 984 |
+
|
| 985 |
+
def _DecodeFixed32(buffer, pos):
|
| 986 |
+
"""Decode a fixed32."""
|
| 987 |
+
|
| 988 |
+
new_pos = pos + 4
|
| 989 |
+
return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
|
| 990 |
+
|
| 991 |
+
|
| 992 |
+
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip-table entry for the two unassigned wire types (6 and 7).

  Always raises, since such a tag can never appear in valid data.
  """
  raise _DecodeError('Tag had invalid wire type.')
|
| 996 |
+
|
| 997 |
+
def _FieldSkipper():
  """Constructs the SkipField function.

  Building SkipField inside a closure lets the dispatch table and mask be
  bound once as closure variables instead of looked up globally per call.
  """

  # The index into this list IS the wire-type value (0..7).  The order of
  # entries is therefore load-bearing; the last two slots are the unassigned
  # wire types 6 and 7, which are always invalid.
  WIRETYPE_TO_SKIPPER = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
      ]

  # Mask extracting the 3 low-order wire-type bits from a tag byte.
  wiretype_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
        The new position (after the tag value), or -1 if the tag is an end-group
        tag (in which case the calling loop should break).
    """
    # The wire type is always in the first byte since varints are little-endian.
    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

  return SkipField


# Module-level singleton used by the decoders above.
SkipField = _FieldSkipper()
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/encoder.py
ADDED
|
@@ -0,0 +1,829 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Code for encoding protocol message primitives.
|
| 32 |
+
|
| 33 |
+
Contains the logic for encoding every logical protocol field type
|
| 34 |
+
into one of the 5 physical wire types.
|
| 35 |
+
|
| 36 |
+
This code is designed to push the Python interpreter's performance to the
|
| 37 |
+
limits.
|
| 38 |
+
|
| 39 |
+
The basic idea is that at startup time, for every field (i.e. every
|
| 40 |
+
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
|
| 41 |
+
sizer takes a value of this field's type and computes its byte size. The
|
| 42 |
+
encoder takes a writer function and a value. It encodes the value into byte
|
| 43 |
+
strings and invokes the writer function to write those strings. Typically the
|
| 44 |
+
writer function is the write() method of a BytesIO.
|
| 45 |
+
|
| 46 |
+
We try to do as much work as possible when constructing the writer and the
|
| 47 |
+
sizer rather than when calling them. In particular:
|
| 48 |
+
* We copy any needed global functions to local variables, so that we do not need
|
| 49 |
+
to do costly global table lookups at runtime.
|
| 50 |
+
* Similarly, we try to do any attribute lookups at startup time if possible.
|
| 51 |
+
* Every field's tag is encoded to bytes at startup, since it can't change at
|
| 52 |
+
runtime.
|
| 53 |
+
* Whatever component of the field size we can compute at startup, we do.
|
| 54 |
+
* We *avoid* sharing code if doing so would make the code slower and not sharing
|
| 55 |
+
does not burden us too much. For example, encoders for repeated fields do
|
| 56 |
+
not just call the encoders for singular fields in a loop because this would
|
| 57 |
+
add an extra function call overhead for every loop iteration; instead, we
|
| 58 |
+
manually inline the single-value encoder into the loop.
|
| 59 |
+
* If a Python function lacks a return statement, Python actually generates
|
| 60 |
+
instructions to pop the result of the last statement off the stack, push
|
| 61 |
+
None onto the stack, and then return that. If we really don't care what
|
| 62 |
+
value is returned, then we can save two instructions by returning the
|
| 63 |
+
result of the last statement. It looks funny but it helps.
|
| 64 |
+
* We assume that type and bounds checking has happened at a higher level.
|
| 65 |
+
"""
|
| 66 |
+
|
| 67 |
+
__author__ = 'kenton@google.com (Kenton Varda)'
|
| 68 |
+
|
| 69 |
+
import struct
|
| 70 |
+
|
| 71 |
+
from google.protobuf.internal import wire_format
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
# This will overflow and thus become IEEE-754 "infinity".  We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
# These sentinels are compared against in the floating-point encoders to
# detect non-finite values.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def _VarintSize(value):
|
| 81 |
+
"""Compute the size of a varint value."""
|
| 82 |
+
if value <= 0x7f: return 1
|
| 83 |
+
if value <= 0x3fff: return 2
|
| 84 |
+
if value <= 0x1fffff: return 3
|
| 85 |
+
if value <= 0xfffffff: return 4
|
| 86 |
+
if value <= 0x7ffffffff: return 5
|
| 87 |
+
if value <= 0x3ffffffffff: return 6
|
| 88 |
+
if value <= 0x1ffffffffffff: return 7
|
| 89 |
+
if value <= 0xffffffffffffff: return 8
|
| 90 |
+
if value <= 0x7fffffffffffffff: return 9
|
| 91 |
+
return 10
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def _SignedVarintSize(value):
|
| 95 |
+
"""Compute the size of a signed varint value."""
|
| 96 |
+
if value < 0: return 10
|
| 97 |
+
if value <= 0x7f: return 1
|
| 98 |
+
if value <= 0x3fff: return 2
|
| 99 |
+
if value <= 0x1fffff: return 3
|
| 100 |
+
if value <= 0xfffffff: return 4
|
| 101 |
+
if value <= 0x7ffffffff: return 5
|
| 102 |
+
if value <= 0x3ffffffffff: return 6
|
| 103 |
+
if value <= 0x1ffffffffffff: return 7
|
| 104 |
+
if value <= 0xffffffffffffff: return 8
|
| 105 |
+
if value <= 0x7fffffffffffffff: return 9
|
| 106 |
+
return 10
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
def _TagSize(field_number):
  """Return the number of bytes needed to serialize this field's tag."""
  # The wire type never changes the tag's encoded length (it occupies the
  # 3 low bits of the same varint), so type 0 is used as a placeholder.
  packed = wire_format.PackTag(field_number, 0)
  return _VarintSize(packed)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
# --------------------------------------------------------------------
|
| 117 |
+
# In this section we define some generic sizers. Each of these functions
|
| 118 |
+
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
|
| 119 |
+
# It returns another function which in turn takes parameters specific to a
|
| 120 |
+
# particular field, e.g. the field number and whether it is repeated or packed.
|
| 121 |
+
# Look at the next section to see how these are used.
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value.  Typically compute_value_size is _VarintSize.

  Returns a "sizer constructor": a function of (field_number, is_repeated,
  is_packed) that builds the sizer for a specific field.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize  # bind once; avoids global lookup per call
      def PackedFieldSize(value):
        # Packed wire format: one tag + varint byte-length + raw payload.
        result = 0
        for element in value:
          result += compute_value_size(element)
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated: every element carries its own tag.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size.  modify_value is typically ZigZagEncode.

  Returns a "sizer constructor": a function of (field_number, is_repeated,
  is_packed) that builds the sizer for a specific field.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize  # bind once; avoids global lookup per call
      def PackedFieldSize(value):
        # Packed wire format: one tag + varint byte-length + raw payload.
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated: every element carries its own tag.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field.  The input is the size
  of one value (e.g. 4 for fixed32, 8 for fixed64)."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # Payload size is exactly count * width; add length prefix and tag.
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      element_size = value_size + tag_size  # per-element cost is constant
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      field_size = value_size + tag_size  # constant; the value is ignored
      def FieldSize(value):
        return field_size
      return FieldSize

  return SpecificSizer
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# ====================================================================
|
| 209 |
+
# Here we declare a sizer constructor for each field type. Each "sizer
|
| 210 |
+
# constructor" is a function that takes (field_number, is_repeated, is_packed)
|
| 211 |
+
# as parameters and returns a sizer, which in turn takes a field value as
|
| 212 |
+
# a parameter and returns its encoded size.
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
# Signed varint types share one sizer (negatives sign-extend to 10 bytes).
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

# Unsigned varint types.
UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

# ZigZag-encoded types: the value is transformed before sizing.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

# Fixed-width types: 4 or 8 bytes on the wire regardless of value.
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

# Bools are serialized as a single varint byte.
BoolSizer = _FixedSizer(1)
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed  # length-delimited fields can never be packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Wire size is that of the UTF-8 encoding, not the unicode length.
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed  # length-delimited fields can never be packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Each element costs its own varint length prefix plus the payload.
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""

  # Groups are bracketed by a START_GROUP and an END_GROUP tag, hence * 2.
  tag_size = _TagSize(field_number) * 2
  assert not is_packed  # groups can never be packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed  # submessages are length-delimited, never packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        # Each submessage is prefixed with a varint encoding its byte size.
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
# --------------------------------------------------------------------
|
| 312 |
+
# MessageSet is special: it needs custom logic to compute its size properly.
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything except the payload's length prefix and payload bytes is
  # constant per field: the Item group's start/end tags, the type_id tag
  # and value, and the message field's tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
# --------------------------------------------------------------------
|
| 338 |
+
# Map is special: it needs custom logic to compute its size properly.
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field.

  Args:
    field_descriptor: descriptor of the map field (an implicit repeated
      message field whose entries hold `key` and `value`).
    is_message_map: True when the map's value type is itself a message.
  """

  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  message_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # It's wasteful to create the messages and throw them away one second
      # later since we'll do the same for the actual encode.  But there's not an
      # obvious way to avoid this within the current design without tons of code
      # duplication. For message map, value.ByteSize() should be called to
      # update the status.
      entry_msg = message_type._concrete_class(key=key, value=value)
      total += message_sizer(entry_msg)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize
|
| 365 |
+
|
| 366 |
+
# ====================================================================
|
| 367 |
+
# Encoders!
|
| 368 |
+
|
| 369 |
+
|
| 370 |
+
def _VarintEncoder():
|
| 371 |
+
"""Return an encoder for a basic varint value (does not include tag)."""
|
| 372 |
+
|
| 373 |
+
local_int2byte = struct.Struct('>B').pack
|
| 374 |
+
|
| 375 |
+
def EncodeVarint(write, value, unused_deterministic=None):
|
| 376 |
+
bits = value & 0x7f
|
| 377 |
+
value >>= 7
|
| 378 |
+
while value:
|
| 379 |
+
write(local_int2byte(0x80|bits))
|
| 380 |
+
bits = value & 0x7f
|
| 381 |
+
value >>= 7
|
| 382 |
+
return write(local_int2byte(bits))
|
| 383 |
+
|
| 384 |
+
return EncodeVarint
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
def _SignedVarintEncoder():
|
| 388 |
+
"""Return an encoder for a basic signed varint value (does not include
|
| 389 |
+
tag)."""
|
| 390 |
+
|
| 391 |
+
local_int2byte = struct.Struct('>B').pack
|
| 392 |
+
|
| 393 |
+
def EncodeSignedVarint(write, value, unused_deterministic=None):
|
| 394 |
+
if value < 0:
|
| 395 |
+
value += (1 << 64)
|
| 396 |
+
bits = value & 0x7f
|
| 397 |
+
value >>= 7
|
| 398 |
+
while value:
|
| 399 |
+
write(local_int2byte(0x80|bits))
|
| 400 |
+
bits = value & 0x7f
|
| 401 |
+
value >>= 7
|
| 402 |
+
return write(local_int2byte(bits))
|
| 403 |
+
|
| 404 |
+
return EncodeSignedVarint
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
# Module-level encoder instances, built once at import time and shared by
# all of the encoder constructors in this module.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
def _VarintBytes(value):
  """Return the varint encoding of `value` as a bytes object.

  Only called at startup time (e.g. for precomputing tags), so it trades
  speed for simplicity: chunks are collected in a list and joined.
  """
  chunks = []
  _EncodeVarint(chunks.append, value, True)
  return b"".join(chunks)
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
def TagBytes(field_number, wire_type):
  """Encode the given (field_number, wire_type) tag; return the bytes.

  Only called at startup, so speed is not a concern here.
  """
  packed_tag = wire_format.PackTag(field_number, wire_type)
  return bytes(_VarintBytes(packed_tag))
|
| 424 |
+
|
| 425 |
+
# --------------------------------------------------------------------
|
| 426 |
+
# As with sizers (see above), we have a number of common encoder
|
| 427 |
+
# implementations.
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint  # bind once for the hot loop
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # First pass computes the payload size for the length prefix.
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size, deterministic)
        # Second pass emits the elements back-to-back, no per-element tags.
        for element in value:
          encode_value(write, element, deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, element, deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, value, deterministic)
      return EncodeField

  return SpecificEncoder
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value.
      compute_value_size:  A function which computes the size of an individual
        (already modified) value.
      modify_value:  Transformation applied to each value before encoding.
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint  # bind once for the hot loop
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # First pass computes the payload size for the length prefix.
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size, deterministic)
        # Second pass emits the elements back-to-back, no per-element tags.
        for element in value:
          encode_value(write, modify_value(element), deterministic)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element), deterministic)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, deterministic):
        write(tag_bytes)
        return encode_value(write, modify_value(value), deterministic)
      return EncodeField

  return SpecificEncoder
|
| 503 |
+
|
| 504 |
+
|
| 505 |
+
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack  # bind once; avoids attr lookup per call
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Fixed-width payload size is just count * width; no sizing pass.
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
|
| 541 |
+
|
| 542 |
+
|
| 543 |
+
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
    wire_type: The field's wire type, for encoding tags.
    format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    # Hand-written little-endian IEEE-754 single-precision bit patterns for
    # the non-finite values that struct.pack may refuse to encode.
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN compares unequal to itself
        write(b'\x00\x00\xC0\x7F')
      else:
        # Value is finite after all: re-raise the original pack error.
        raise
  elif value_size == 8:
    # Double-precision (8-byte) equivalents of the patterns above.
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    # Local alias for a faster lookup inside the per-element loops.
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size, deterministic)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value, unused_deterministic=None):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
|
| 618 |
+
|
| 619 |
+
|
| 620 |
+
# ====================================================================
# Here we declare an encoder constructor for each field type.  These work
# very similarly to sizer constructors, described earlier.


# Plain varint types share one encoder; int32/int64/enum use the
# sign-extending variant.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

# sint32/sint64 values are zig-zag transformed before the varint is written.
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
|
| 645 |
+
|
| 646 |
+
|
| 647 |
+
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  # Every bool serializes as a single byte.
  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    packed_tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    encode_varint = _EncodeVarint

    def EncodePackedField(write, value, deterministic):
      write(packed_tag)
      # One byte per element, so the payload length equals the count.
      encode_varint(write, len(value), deterministic)
      for element in value:
        write(true_byte if element else false_byte)

    return EncodePackedField

  tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
  if is_repeated:

    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for element in value:
        write(tag_bytes)
        write(true_byte if element else false_byte)

    return EncodeRepeatedField

  def EncodeField(write, value, unused_deterministic=None):
    write(tag_bytes)
    return write(true_byte if value else false_byte)

  return EncodeField
|
| 682 |
+
|
| 683 |
+
|
| 684 |
+
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  # Strings are never packed on the wire.
  assert not is_packed
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint

  if is_repeated:

    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        data = element.encode('utf-8')
        write(tag)
        # Length prefix counts encoded bytes, not characters.
        encode_varint(write, len(data), deterministic)
        write(data)

    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    data = value.encode('utf-8')
    write(tag)
    encode_varint(write, len(data), deterministic)
    return write(data)

  return EncodeField
|
| 706 |
+
|
| 707 |
+
|
| 708 |
+
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field."""

  # Bytes fields are never packed on the wire.
  assert not is_packed
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint

  if is_repeated:

    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        encode_varint(write, len(element), deterministic)
        write(element)

    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, len(value), deterministic)
    return write(value)

  return EncodeField
|
| 728 |
+
|
| 729 |
+
|
| 730 |
+
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field."""

  # Groups are delimited by start/end tags rather than a length prefix.
  assert not is_packed
  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)

  if is_repeated:

    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write, deterministic)
        write(end_tag)

    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(start_tag)
    value._InternalSerialize(write, deterministic)
    return write(end_tag)

  return EncodeField
|
| 749 |
+
|
| 750 |
+
|
| 751 |
+
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field."""

  # Sub-messages are length-delimited; packing does not apply.
  assert not is_packed
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint

  if is_repeated:

    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        encode_varint(write, element.ByteSize(), deterministic)
        element._InternalSerialize(write, deterministic)

    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, value.ByteSize(), deterministic)
    return value._InternalSerialize(write, deterministic)

  return EncodeField
|
| 770 |
+
|
| 771 |
+
|
| 772 |
+
# --------------------------------------------------------------------
|
| 773 |
+
# As before, MessageSet is special.
|
| 774 |
+
|
| 775 |
+
|
| 776 |
+
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # The group-start, type_id and message tags are fixed for a given field
  # number, so precompute them into a single byte string.
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value, deterministic):
    # Emits: start tags + length-prefixed serialized message + end tag.
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField
|
| 802 |
+
|
| 803 |
+
|
| 804 |
+
# --------------------------------------------------------------------
|
| 805 |
+
# As before, Map is special.
|
| 806 |
+
|
| 807 |
+
|
| 808 |
+
def MapEncoder(field_descriptor):
  """Encoder for map fields.

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    # Deterministic serialization requires a stable key order; otherwise the
    # dict's own iteration order is used.
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      # Each map item is serialized as an ordinary MapEntry sub-message.
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/enum_type_wrapper.py
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""A simple wrapper around enum types to expose utility functions.
|
| 32 |
+
|
| 33 |
+
Instances are created as properties with the same name as the enum they wrap
|
| 34 |
+
on proto classes. For usage, see:
|
| 35 |
+
reflection_test.py
|
| 36 |
+
"""
|
| 37 |
+
|
| 38 |
+
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # Type alias that mypy stubs can narrow to a generic int-constrained
  # parameter, letting generated subclasses be typed more precisely, e.g.:
  #   def MyGeneratedEnum(Message):
  #     ValueType = NewType('ValueType', int)
  #     def Name(self, number: MyGeneratedEnum.ValueType) -> str
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall through so the raise below is not chained to the KeyError

    if isinstance(number, int):
      # repr here to handle the odd case when you pass in a boolean.
      raise ValueError('Enum {} has no name defined for value {!r}'.format(
          self._enum_type.name, number))
    raise TypeError(
        'Enum value for {} must be an int, but got {} {!r}.'.format(
            self._enum_type.name, type(number), number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall through so the raise below is not chained to the KeyError
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """
    return [v.name for v in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """
    return [v.number for v in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(v.name, v.number) for v in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    try:
      # __getattribute__ via super avoids infinite recursion through this
      # very __getattr__ when looking up the wrapped descriptor.
      enum_type = super(EnumTypeWrapper, self).__getattribute__('_enum_type')
      return enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall through so the raise below is not chained to the KeyError
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/extension_dict.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Contains _ExtensionDict class to represent extensions.
|
| 32 |
+
"""
|
| 33 |
+
|
| 34 |
+
from google.protobuf.internal import type_checkers
|
| 35 |
+
from google.protobuf.descriptor import FieldDescriptor
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid.

  Raises KeyError (the error type callers of the extension dict expect) if
  the handle is not a FieldDescriptor, not an extension, lacks a containing
  type, or extends a different message type than *message*.
  """

  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % extension_handle.full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % extension_handle.full_name)

  # Identity comparison: the handle must reference the exact same descriptor
  # object as the message it is being used with.
  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (extension_handle.full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# TODO(robinson): Unify error handling of "unknown extension" crap.
|
| 61 |
+
# TODO(robinson): Support iteritems()-style iteration over all
|
| 62 |
+
# extensions with the "has" bits turned on?
|
| 63 |
+
class _ExtensionDict(object):
|
| 64 |
+
|
| 65 |
+
"""Dict-like container for Extension fields on proto instances.
|
| 66 |
+
|
| 67 |
+
Note that in all cases we expect extension handles to be
|
| 68 |
+
FieldDescriptors.
|
| 69 |
+
"""
|
| 70 |
+
|
| 71 |
+
def __init__(self, extended_message):
|
| 72 |
+
"""
|
| 73 |
+
Args:
|
| 74 |
+
extended_message: Message instance for which we are the Extensions dict.
|
| 75 |
+
"""
|
| 76 |
+
self._extended_message = extended_message
|
| 77 |
+
|
| 78 |
+
def __getitem__(self, extension_handle):
|
| 79 |
+
"""Returns the current value of the given extension handle."""
|
| 80 |
+
|
| 81 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 82 |
+
|
| 83 |
+
result = self._extended_message._fields.get(extension_handle)
|
| 84 |
+
if result is not None:
|
| 85 |
+
return result
|
| 86 |
+
|
| 87 |
+
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
|
| 88 |
+
result = extension_handle._default_constructor(self._extended_message)
|
| 89 |
+
elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
|
| 90 |
+
message_type = extension_handle.message_type
|
| 91 |
+
if not hasattr(message_type, '_concrete_class'):
|
| 92 |
+
# pylint: disable=protected-access
|
| 93 |
+
self._extended_message._FACTORY.GetPrototype(message_type)
|
| 94 |
+
assert getattr(extension_handle.message_type, '_concrete_class', None), (
|
| 95 |
+
'Uninitialized concrete class found for field %r (message type %r)'
|
| 96 |
+
% (extension_handle.full_name,
|
| 97 |
+
extension_handle.message_type.full_name))
|
| 98 |
+
result = extension_handle.message_type._concrete_class()
|
| 99 |
+
try:
|
| 100 |
+
result._SetListener(self._extended_message._listener_for_children)
|
| 101 |
+
except ReferenceError:
|
| 102 |
+
pass
|
| 103 |
+
else:
|
| 104 |
+
# Singular scalar -- just return the default without inserting into the
|
| 105 |
+
# dict.
|
| 106 |
+
return extension_handle.default_value
|
| 107 |
+
|
| 108 |
+
# Atomically check if another thread has preempted us and, if not, swap
|
| 109 |
+
# in the new object we just created. If someone has preempted us, we
|
| 110 |
+
# take that object and discard ours.
|
| 111 |
+
# WARNING: We are relying on setdefault() being atomic. This is true
|
| 112 |
+
# in CPython but we haven't investigated others. This warning appears
|
| 113 |
+
# in several other locations in this file.
|
| 114 |
+
result = self._extended_message._fields.setdefault(
|
| 115 |
+
extension_handle, result)
|
| 116 |
+
|
| 117 |
+
return result
|
| 118 |
+
|
| 119 |
+
def __eq__(self, other):
|
| 120 |
+
if not isinstance(other, self.__class__):
|
| 121 |
+
return False
|
| 122 |
+
|
| 123 |
+
my_fields = self._extended_message.ListFields()
|
| 124 |
+
other_fields = other._extended_message.ListFields()
|
| 125 |
+
|
| 126 |
+
# Get rid of non-extension fields.
|
| 127 |
+
my_fields = [field for field in my_fields if field.is_extension]
|
| 128 |
+
other_fields = [field for field in other_fields if field.is_extension]
|
| 129 |
+
|
| 130 |
+
return my_fields == other_fields
|
| 131 |
+
|
| 132 |
+
def __ne__(self, other):
|
| 133 |
+
return not self == other
|
| 134 |
+
|
| 135 |
+
def __len__(self):
|
| 136 |
+
fields = self._extended_message.ListFields()
|
| 137 |
+
# Get rid of non-extension fields.
|
| 138 |
+
extension_fields = [field for field in fields if field[0].is_extension]
|
| 139 |
+
return len(extension_fields)
|
| 140 |
+
|
| 141 |
+
def __hash__(self):
|
| 142 |
+
raise TypeError('unhashable object')
|
| 143 |
+
|
| 144 |
+
# Note that this is only meaningful for non-repeated, scalar extension
|
| 145 |
+
# fields. Note also that we may have to call _Modified() when we do
|
| 146 |
+
# successfully set a field this way, to set any necessary "has" bits in the
|
| 147 |
+
# ancestors of the extended message.
|
| 148 |
+
def __setitem__(self, extension_handle, value):
|
| 149 |
+
"""If extension_handle specifies a non-repeated, scalar extension
|
| 150 |
+
field, sets the value of that field.
|
| 151 |
+
"""
|
| 152 |
+
|
| 153 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 154 |
+
|
| 155 |
+
if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
|
| 156 |
+
extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
|
| 157 |
+
raise TypeError(
|
| 158 |
+
'Cannot assign to extension "%s" because it is a repeated or '
|
| 159 |
+
'composite type.' % extension_handle.full_name)
|
| 160 |
+
|
| 161 |
+
# It's slightly wasteful to lookup the type checker each time,
|
| 162 |
+
# but we expect this to be a vanishingly uncommon case anyway.
|
| 163 |
+
type_checker = type_checkers.GetTypeChecker(extension_handle)
|
| 164 |
+
# pylint: disable=protected-access
|
| 165 |
+
self._extended_message._fields[extension_handle] = (
|
| 166 |
+
type_checker.CheckValue(value))
|
| 167 |
+
self._extended_message._Modified()
|
| 168 |
+
|
| 169 |
+
def __delitem__(self, extension_handle):
|
| 170 |
+
self._extended_message.ClearExtension(extension_handle)
|
| 171 |
+
|
| 172 |
+
def _FindExtensionByName(self, name):
|
| 173 |
+
"""Tries to find a known extension with the specified name.
|
| 174 |
+
|
| 175 |
+
Args:
|
| 176 |
+
name: Extension full name.
|
| 177 |
+
|
| 178 |
+
Returns:
|
| 179 |
+
Extension field descriptor.
|
| 180 |
+
"""
|
| 181 |
+
return self._extended_message._extensions_by_name.get(name, None)
|
| 182 |
+
|
| 183 |
+
def _FindExtensionByNumber(self, number):
|
| 184 |
+
"""Tries to find a known extension with the field number.
|
| 185 |
+
|
| 186 |
+
Args:
|
| 187 |
+
number: Extension field number.
|
| 188 |
+
|
| 189 |
+
Returns:
|
| 190 |
+
Extension field descriptor.
|
| 191 |
+
"""
|
| 192 |
+
return self._extended_message._extensions_by_number.get(number, None)
|
| 193 |
+
|
| 194 |
+
def __iter__(self):
|
| 195 |
+
# Return a generator over the populated extension fields
|
| 196 |
+
return (f[0] for f in self._extended_message.ListFields()
|
| 197 |
+
if f[0].is_extension)
|
| 198 |
+
|
| 199 |
+
def __contains__(self, extension_handle):
|
| 200 |
+
_VerifyExtensionHandle(self._extended_message, extension_handle)
|
| 201 |
+
|
| 202 |
+
if extension_handle not in self._extended_message._fields:
|
| 203 |
+
return False
|
| 204 |
+
|
| 205 |
+
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
|
| 206 |
+
return bool(self._extended_message._fields.get(extension_handle))
|
| 207 |
+
|
| 208 |
+
if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
|
| 209 |
+
value = self._extended_message._fields.get(extension_handle)
|
| 210 |
+
# pylint: disable=protected-access
|
| 211 |
+
return value is not None and value._is_present_in_parent
|
| 212 |
+
|
| 213 |
+
return True
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/message_listener.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Defines a listener interface for observing certain
|
| 32 |
+
state transitions on Message objects.
|
| 33 |
+
|
| 34 |
+
Also defines a null implementation of this interface.
|
| 35 |
+
"""
|
| 36 |
+
|
| 37 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class MessageListener(object):
|
| 41 |
+
|
| 42 |
+
"""Listens for modifications made to a message. Meant to be registered via
|
| 43 |
+
Message._SetListener().
|
| 44 |
+
|
| 45 |
+
Attributes:
|
| 46 |
+
dirty: If True, then calling Modified() would be a no-op. This can be
|
| 47 |
+
used to avoid these calls entirely in the common case.
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
def Modified(self):
|
| 51 |
+
"""Called every time the message is modified in such a way that the parent
|
| 52 |
+
message may need to be updated. This currently means either:
|
| 53 |
+
(a) The message was modified for the first time, so the parent message
|
| 54 |
+
should henceforth mark the message as present.
|
| 55 |
+
(b) The message's cached byte size became dirty -- i.e. the message was
|
| 56 |
+
modified for the first time after a previous call to ByteSize().
|
| 57 |
+
Therefore the parent should also mark its byte size as dirty.
|
| 58 |
+
Note that (a) implies (b), since new objects start out with a client cached
|
| 59 |
+
size (zero). However, we document (a) explicitly because it is important.
|
| 60 |
+
|
| 61 |
+
Modified() will *only* be called in response to one of these two events --
|
| 62 |
+
not every time the sub-message is modified.
|
| 63 |
+
|
| 64 |
+
Note that if the listener's |dirty| attribute is true, then calling
|
| 65 |
+
Modified at the moment would be a no-op, so it can be skipped. Performance-
|
| 66 |
+
sensitive callers should check this attribute directly before calling since
|
| 67 |
+
it will be true most of the time.
|
| 68 |
+
"""
|
| 69 |
+
|
| 70 |
+
raise NotImplementedError
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class NullMessageListener(object):
|
| 74 |
+
|
| 75 |
+
"""No-op MessageListener implementation."""
|
| 76 |
+
|
| 77 |
+
def Modified(self):
|
| 78 |
+
pass
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/python_message.py
ADDED
|
@@ -0,0 +1,1539 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
# This code is meant to work on Python 2.4 and above only.
|
| 32 |
+
#
|
| 33 |
+
# TODO(robinson): Helpers for verbose, common checks like seeing if a
|
| 34 |
+
# descriptor's cpp_type is CPPTYPE_MESSAGE.
|
| 35 |
+
|
| 36 |
+
"""Contains a metaclass and helper functions used to create
|
| 37 |
+
protocol message classes from Descriptor objects at runtime.
|
| 38 |
+
|
| 39 |
+
Recall that a metaclass is the "type" of a class.
|
| 40 |
+
(A class is to a metaclass what an instance is to a class.)
|
| 41 |
+
|
| 42 |
+
In this case, we use the GeneratedProtocolMessageType metaclass
|
| 43 |
+
to inject all the useful functionality into the classes
|
| 44 |
+
output by the protocol compiler at compile-time.
|
| 45 |
+
|
| 46 |
+
The upshot of all this is that the real implementation
|
| 47 |
+
details for ALL pure-Python protocol buffers are *here in
|
| 48 |
+
this file*.
|
| 49 |
+
"""
|
| 50 |
+
|
| 51 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 52 |
+
|
| 53 |
+
from io import BytesIO
|
| 54 |
+
import struct
|
| 55 |
+
import sys
|
| 56 |
+
import weakref
|
| 57 |
+
|
| 58 |
+
# We use "as" to avoid name collisions with variables.
|
| 59 |
+
from google.protobuf.internal import api_implementation
|
| 60 |
+
from google.protobuf.internal import containers
|
| 61 |
+
from google.protobuf.internal import decoder
|
| 62 |
+
from google.protobuf.internal import encoder
|
| 63 |
+
from google.protobuf.internal import enum_type_wrapper
|
| 64 |
+
from google.protobuf.internal import extension_dict
|
| 65 |
+
from google.protobuf.internal import message_listener as message_listener_mod
|
| 66 |
+
from google.protobuf.internal import type_checkers
|
| 67 |
+
from google.protobuf.internal import well_known_types
|
| 68 |
+
from google.protobuf.internal import wire_format
|
| 69 |
+
from google.protobuf import descriptor as descriptor_mod
|
| 70 |
+
from google.protobuf import message as message_mod
|
| 71 |
+
from google.protobuf import text_format
|
| 72 |
+
|
| 73 |
+
_FieldDescriptor = descriptor_mod.FieldDescriptor
|
| 74 |
+
_AnyFullTypeName = 'google.protobuf.Any'
|
| 75 |
+
_ExtensionDict = extension_dict._ExtensionDict
|
| 76 |
+
|
| 77 |
+
class GeneratedProtocolMessageType(type):
|
| 78 |
+
|
| 79 |
+
"""Metaclass for protocol message classes created at runtime from Descriptors.
|
| 80 |
+
|
| 81 |
+
We add implementations for all methods described in the Message class. We
|
| 82 |
+
also create properties to allow getting/setting all fields in the protocol
|
| 83 |
+
message. Finally, we create slots to prevent users from accidentally
|
| 84 |
+
"setting" nonexistent fields in the protocol message, which then wouldn't get
|
| 85 |
+
serialized / deserialized properly.
|
| 86 |
+
|
| 87 |
+
The protocol compiler currently uses this metaclass to create protocol
|
| 88 |
+
message classes at runtime. Clients can also manually create their own
|
| 89 |
+
classes at runtime, as in this example:
|
| 90 |
+
|
| 91 |
+
mydescriptor = Descriptor(.....)
|
| 92 |
+
factory = symbol_database.Default()
|
| 93 |
+
factory.pool.AddDescriptor(mydescriptor)
|
| 94 |
+
MyProtoClass = factory.GetPrototype(mydescriptor)
|
| 95 |
+
myproto_instance = MyProtoClass()
|
| 96 |
+
myproto.foo_field = 23
|
| 97 |
+
...
|
| 98 |
+
"""
|
| 99 |
+
|
| 100 |
+
# Must be consistent with the protocol-compiler code in
|
| 101 |
+
# proto2/compiler/internal/generator.*.
|
| 102 |
+
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
| 103 |
+
|
| 104 |
+
def __new__(cls, name, bases, dictionary):
|
| 105 |
+
"""Custom allocation for runtime-generated class types.
|
| 106 |
+
|
| 107 |
+
We override __new__ because this is apparently the only place
|
| 108 |
+
where we can meaningfully set __slots__ on the class we're creating(?).
|
| 109 |
+
(The interplay between metaclasses and slots is not very well-documented).
|
| 110 |
+
|
| 111 |
+
Args:
|
| 112 |
+
name: Name of the class (ignored, but required by the
|
| 113 |
+
metaclass protocol).
|
| 114 |
+
bases: Base classes of the class we're constructing.
|
| 115 |
+
(Should be message.Message). We ignore this field, but
|
| 116 |
+
it's required by the metaclass protocol
|
| 117 |
+
dictionary: The class dictionary of the class we're
|
| 118 |
+
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
| 119 |
+
a Descriptor object describing this protocol message
|
| 120 |
+
type.
|
| 121 |
+
|
| 122 |
+
Returns:
|
| 123 |
+
Newly-allocated class.
|
| 124 |
+
|
| 125 |
+
Raises:
|
| 126 |
+
RuntimeError: Generated code only work with python cpp extension.
|
| 127 |
+
"""
|
| 128 |
+
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
| 129 |
+
|
| 130 |
+
if isinstance(descriptor, str):
|
| 131 |
+
raise RuntimeError('The generated code only work with python cpp '
|
| 132 |
+
'extension, but it is using pure python runtime.')
|
| 133 |
+
|
| 134 |
+
# If a concrete class already exists for this descriptor, don't try to
|
| 135 |
+
# create another. Doing so will break any messages that already exist with
|
| 136 |
+
# the existing class.
|
| 137 |
+
#
|
| 138 |
+
# The C++ implementation appears to have its own internal `PyMessageFactory`
|
| 139 |
+
# to achieve similar results.
|
| 140 |
+
#
|
| 141 |
+
# This most commonly happens in `text_format.py` when using descriptors from
|
| 142 |
+
# a custom pool; it calls symbol_database.Global().getPrototype() on a
|
| 143 |
+
# descriptor which already has an existing concrete class.
|
| 144 |
+
new_class = getattr(descriptor, '_concrete_class', None)
|
| 145 |
+
if new_class:
|
| 146 |
+
return new_class
|
| 147 |
+
|
| 148 |
+
if descriptor.full_name in well_known_types.WKTBASES:
|
| 149 |
+
bases += (well_known_types.WKTBASES[descriptor.full_name],)
|
| 150 |
+
_AddClassAttributesForNestedExtensions(descriptor, dictionary)
|
| 151 |
+
_AddSlots(descriptor, dictionary)
|
| 152 |
+
|
| 153 |
+
superclass = super(GeneratedProtocolMessageType, cls)
|
| 154 |
+
new_class = superclass.__new__(cls, name, bases, dictionary)
|
| 155 |
+
return new_class
|
| 156 |
+
|
| 157 |
+
def __init__(cls, name, bases, dictionary):
|
| 158 |
+
"""Here we perform the majority of our work on the class.
|
| 159 |
+
We add enum getters, an __init__ method, implementations
|
| 160 |
+
of all Message methods, and properties for all fields
|
| 161 |
+
in the protocol type.
|
| 162 |
+
|
| 163 |
+
Args:
|
| 164 |
+
name: Name of the class (ignored, but required by the
|
| 165 |
+
metaclass protocol).
|
| 166 |
+
bases: Base classes of the class we're constructing.
|
| 167 |
+
(Should be message.Message). We ignore this field, but
|
| 168 |
+
it's required by the metaclass protocol
|
| 169 |
+
dictionary: The class dictionary of the class we're
|
| 170 |
+
constructing. dictionary[_DESCRIPTOR_KEY] must contain
|
| 171 |
+
a Descriptor object describing this protocol message
|
| 172 |
+
type.
|
| 173 |
+
"""
|
| 174 |
+
descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
|
| 175 |
+
|
| 176 |
+
# If this is an _existing_ class looked up via `_concrete_class` in the
|
| 177 |
+
# __new__ method above, then we don't need to re-initialize anything.
|
| 178 |
+
existing_class = getattr(descriptor, '_concrete_class', None)
|
| 179 |
+
if existing_class:
|
| 180 |
+
assert existing_class is cls, (
|
| 181 |
+
'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
|
| 182 |
+
% (descriptor.full_name))
|
| 183 |
+
return
|
| 184 |
+
|
| 185 |
+
cls._decoders_by_tag = {}
|
| 186 |
+
if (descriptor.has_options and
|
| 187 |
+
descriptor.GetOptions().message_set_wire_format):
|
| 188 |
+
cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
|
| 189 |
+
decoder.MessageSetItemDecoder(descriptor), None)
|
| 190 |
+
|
| 191 |
+
# Attach stuff to each FieldDescriptor for quick lookup later on.
|
| 192 |
+
for field in descriptor.fields:
|
| 193 |
+
_AttachFieldHelpers(cls, field)
|
| 194 |
+
|
| 195 |
+
descriptor._concrete_class = cls # pylint: disable=protected-access
|
| 196 |
+
_AddEnumValues(descriptor, cls)
|
| 197 |
+
_AddInitMethod(descriptor, cls)
|
| 198 |
+
_AddPropertiesForFields(descriptor, cls)
|
| 199 |
+
_AddPropertiesForExtensions(descriptor, cls)
|
| 200 |
+
_AddStaticMethods(cls)
|
| 201 |
+
_AddMessageMethods(descriptor, cls)
|
| 202 |
+
_AddPrivateHelperMethods(descriptor, cls)
|
| 203 |
+
|
| 204 |
+
superclass = super(GeneratedProtocolMessageType, cls)
|
| 205 |
+
superclass.__init__(name, bases, dictionary)
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
# Stateless helpers for GeneratedProtocolMessageType below.
|
| 209 |
+
# Outside clients should not access these directly.
|
| 210 |
+
#
|
| 211 |
+
# I opted not to make any of these methods on the metaclass, to make it more
|
| 212 |
+
# clear that I'm not really using any state there and to keep clients from
|
| 213 |
+
# thinking that they have direct access to these construction helpers.
|
| 214 |
+
|
| 215 |
+
|
| 216 |
+
def _PropertyName(proto_field_name):
|
| 217 |
+
"""Returns the name of the public property attribute which
|
| 218 |
+
clients can use to get and (in some cases) set the value
|
| 219 |
+
of a protocol message field.
|
| 220 |
+
|
| 221 |
+
Args:
|
| 222 |
+
proto_field_name: The protocol message field name, exactly
|
| 223 |
+
as it appears (or would appear) in a .proto file.
|
| 224 |
+
"""
|
| 225 |
+
# TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
|
| 226 |
+
# nnorwitz makes my day by writing:
|
| 227 |
+
# """
|
| 228 |
+
# FYI. See the keyword module in the stdlib. This could be as simple as:
|
| 229 |
+
#
|
| 230 |
+
# if keyword.iskeyword(proto_field_name):
|
| 231 |
+
# return proto_field_name + "_"
|
| 232 |
+
# return proto_field_name
|
| 233 |
+
# """
|
| 234 |
+
# Kenton says: The above is a BAD IDEA. People rely on being able to use
|
| 235 |
+
# getattr() and setattr() to reflectively manipulate field values. If we
|
| 236 |
+
# rename the properties, then every such user has to also make sure to apply
|
| 237 |
+
# the same transformation. Note that currently if you name a field "yield",
|
| 238 |
+
# you can still access it just fine using getattr/setattr -- it's not even
|
| 239 |
+
# that cumbersome to do so.
|
| 240 |
+
# TODO(kenton): Remove this method entirely if/when everyone agrees with my
|
| 241 |
+
# position.
|
| 242 |
+
return proto_field_name
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
def _AddSlots(message_descriptor, dictionary):
|
| 246 |
+
"""Adds a __slots__ entry to dictionary, containing the names of all valid
|
| 247 |
+
attributes for this message type.
|
| 248 |
+
|
| 249 |
+
Args:
|
| 250 |
+
message_descriptor: A Descriptor instance describing this message type.
|
| 251 |
+
dictionary: Class dictionary to which we'll add a '__slots__' entry.
|
| 252 |
+
"""
|
| 253 |
+
dictionary['__slots__'] = ['_cached_byte_size',
|
| 254 |
+
'_cached_byte_size_dirty',
|
| 255 |
+
'_fields',
|
| 256 |
+
'_unknown_fields',
|
| 257 |
+
'_unknown_field_set',
|
| 258 |
+
'_is_present_in_parent',
|
| 259 |
+
'_listener',
|
| 260 |
+
'_listener_for_children',
|
| 261 |
+
'__weakref__',
|
| 262 |
+
'_oneofs']
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
def _IsMessageSetExtension(field):
|
| 266 |
+
return (field.is_extension and
|
| 267 |
+
field.containing_type.has_options and
|
| 268 |
+
field.containing_type.GetOptions().message_set_wire_format and
|
| 269 |
+
field.type == _FieldDescriptor.TYPE_MESSAGE and
|
| 270 |
+
field.label == _FieldDescriptor.LABEL_OPTIONAL)
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
def _IsMapField(field):
|
| 274 |
+
return (field.type == _FieldDescriptor.TYPE_MESSAGE and
|
| 275 |
+
field.message_type.has_options and
|
| 276 |
+
field.message_type.GetOptions().map_entry)
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
def _IsMessageMapField(field):
|
| 280 |
+
value_type = field.message_type.fields_by_name['value']
|
| 281 |
+
return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
def _AttachFieldHelpers(cls, field_descriptor):
|
| 285 |
+
is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
|
| 286 |
+
is_packable = (is_repeated and
|
| 287 |
+
wire_format.IsTypePackable(field_descriptor.type))
|
| 288 |
+
is_proto3 = field_descriptor.containing_type.syntax == 'proto3'
|
| 289 |
+
if not is_packable:
|
| 290 |
+
is_packed = False
|
| 291 |
+
elif field_descriptor.containing_type.syntax == 'proto2':
|
| 292 |
+
is_packed = (field_descriptor.has_options and
|
| 293 |
+
field_descriptor.GetOptions().packed)
|
| 294 |
+
else:
|
| 295 |
+
has_packed_false = (field_descriptor.has_options and
|
| 296 |
+
field_descriptor.GetOptions().HasField('packed') and
|
| 297 |
+
field_descriptor.GetOptions().packed == False)
|
| 298 |
+
is_packed = not has_packed_false
|
| 299 |
+
is_map_entry = _IsMapField(field_descriptor)
|
| 300 |
+
|
| 301 |
+
if is_map_entry:
|
| 302 |
+
field_encoder = encoder.MapEncoder(field_descriptor)
|
| 303 |
+
sizer = encoder.MapSizer(field_descriptor,
|
| 304 |
+
_IsMessageMapField(field_descriptor))
|
| 305 |
+
elif _IsMessageSetExtension(field_descriptor):
|
| 306 |
+
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
|
| 307 |
+
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
|
| 308 |
+
else:
|
| 309 |
+
field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
|
| 310 |
+
field_descriptor.number, is_repeated, is_packed)
|
| 311 |
+
sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
|
| 312 |
+
field_descriptor.number, is_repeated, is_packed)
|
| 313 |
+
|
| 314 |
+
field_descriptor._encoder = field_encoder
|
| 315 |
+
field_descriptor._sizer = sizer
|
| 316 |
+
field_descriptor._default_constructor = _DefaultValueConstructorForField(
|
| 317 |
+
field_descriptor)
|
| 318 |
+
|
| 319 |
+
def AddDecoder(wiretype, is_packed):
|
| 320 |
+
tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
|
| 321 |
+
decode_type = field_descriptor.type
|
| 322 |
+
if (decode_type == _FieldDescriptor.TYPE_ENUM and
|
| 323 |
+
type_checkers.SupportsOpenEnums(field_descriptor)):
|
| 324 |
+
decode_type = _FieldDescriptor.TYPE_INT32
|
| 325 |
+
|
| 326 |
+
oneof_descriptor = None
|
| 327 |
+
clear_if_default = False
|
| 328 |
+
if field_descriptor.containing_oneof is not None:
|
| 329 |
+
oneof_descriptor = field_descriptor
|
| 330 |
+
elif (is_proto3 and not is_repeated and
|
| 331 |
+
field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE):
|
| 332 |
+
clear_if_default = True
|
| 333 |
+
|
| 334 |
+
if is_map_entry:
|
| 335 |
+
is_message_map = _IsMessageMapField(field_descriptor)
|
| 336 |
+
|
| 337 |
+
field_decoder = decoder.MapDecoder(
|
| 338 |
+
field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
|
| 339 |
+
is_message_map)
|
| 340 |
+
elif decode_type == _FieldDescriptor.TYPE_STRING:
|
| 341 |
+
field_decoder = decoder.StringDecoder(
|
| 342 |
+
field_descriptor.number, is_repeated, is_packed,
|
| 343 |
+
field_descriptor, field_descriptor._default_constructor,
|
| 344 |
+
clear_if_default)
|
| 345 |
+
elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
|
| 346 |
+
field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
|
| 347 |
+
field_descriptor.number, is_repeated, is_packed,
|
| 348 |
+
field_descriptor, field_descriptor._default_constructor)
|
| 349 |
+
else:
|
| 350 |
+
field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
|
| 351 |
+
field_descriptor.number, is_repeated, is_packed,
|
| 352 |
+
# pylint: disable=protected-access
|
| 353 |
+
field_descriptor, field_descriptor._default_constructor,
|
| 354 |
+
clear_if_default)
|
| 355 |
+
|
| 356 |
+
cls._decoders_by_tag[tag_bytes] = (field_decoder, oneof_descriptor)
|
| 357 |
+
|
| 358 |
+
AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
|
| 359 |
+
False)
|
| 360 |
+
|
| 361 |
+
if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
|
| 362 |
+
# To support wire compatibility of adding packed = true, add a decoder for
|
| 363 |
+
# packed values regardless of the field's options.
|
| 364 |
+
AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
|
| 368 |
+
extensions = descriptor.extensions_by_name
|
| 369 |
+
for extension_name, extension_field in extensions.items():
|
| 370 |
+
assert extension_name not in dictionary
|
| 371 |
+
dictionary[extension_name] = extension_field
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def _AddEnumValues(descriptor, cls):
|
| 375 |
+
"""Sets class-level attributes for all enum fields defined in this message.
|
| 376 |
+
|
| 377 |
+
Also exporting a class-level object that can name enum values.
|
| 378 |
+
|
| 379 |
+
Args:
|
| 380 |
+
descriptor: Descriptor object for this message type.
|
| 381 |
+
cls: Class we're constructing for this message type.
|
| 382 |
+
"""
|
| 383 |
+
for enum_type in descriptor.enum_types:
|
| 384 |
+
setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
|
| 385 |
+
for enum_value in enum_type.values:
|
| 386 |
+
setattr(cls, enum_value.name, enum_value.number)
|
| 387 |
+
|
| 388 |
+
|
| 389 |
+
def _GetInitializeDefaultForMap(field):
|
| 390 |
+
if field.label != _FieldDescriptor.LABEL_REPEATED:
|
| 391 |
+
raise ValueError('map_entry set on non-repeated field %s' % (
|
| 392 |
+
field.name))
|
| 393 |
+
fields_by_name = field.message_type.fields_by_name
|
| 394 |
+
key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
|
| 395 |
+
|
| 396 |
+
value_field = fields_by_name['value']
|
| 397 |
+
if _IsMessageMapField(field):
|
| 398 |
+
def MakeMessageMapDefault(message):
|
| 399 |
+
return containers.MessageMap(
|
| 400 |
+
message._listener_for_children, value_field.message_type, key_checker,
|
| 401 |
+
field.message_type)
|
| 402 |
+
return MakeMessageMapDefault
|
| 403 |
+
else:
|
| 404 |
+
value_checker = type_checkers.GetTypeChecker(value_field)
|
| 405 |
+
def MakePrimitiveMapDefault(message):
|
| 406 |
+
return containers.ScalarMap(
|
| 407 |
+
message._listener_for_children, key_checker, value_checker,
|
| 408 |
+
field.message_type)
|
| 409 |
+
return MakePrimitiveMapDefault
|
| 410 |
+
|
| 411 |
+
def _DefaultValueConstructorForField(field):
  """Returns a function which returns a default value for a field.

  Args:
    field: FieldDescriptor object for this field.

  The returned function has one argument:
    message: Message instance containing this field, or a weakref proxy
      of same.

  That function in turn returns a default value for this field.  The default
  value may refer back to |message| via a weak reference.
  """

  # Map fields have their own dedicated container constructors.
  if _IsMapField(field):
    return _GetInitializeDefaultForMap(field)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    # Repeated fields may not carry a non-empty default.
    if field.has_default_value and field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have
      # been set.  (Depends on order in which we initialize the classes).
      message_type = field.message_type
      def MakeRepeatedMessageDefault(message):
        return containers.RepeatedCompositeFieldContainer(
            message._listener_for_children, field.message_type)
      return MakeRepeatedMessageDefault
    else:
      type_checker = type_checkers.GetTypeChecker(field)
      def MakeRepeatedScalarDefault(message):
        return containers.RepeatedScalarFieldContainer(
            message._listener_for_children, type_checker)
      return MakeRepeatedScalarDefault

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # _concrete_class may not yet be initialized.
    message_type = field.message_type
    def MakeSubMessageDefault(message):
      # _concrete_class is attached later during class initialization; by the
      # time a default submessage is actually constructed it must exist.
      assert getattr(message_type, '_concrete_class', None), (
          'Uninitialized concrete class found for field %r (message type %r)'
          % (field.full_name, message_type.full_name))
      result = message_type._concrete_class()
      # Oneof members get a listener that also updates the oneof state.
      result._SetListener(
          _OneofListener(message, field)
          if field.containing_oneof is not None
          else message._listener_for_children)
      return result
    return MakeSubMessageDefault

  def MakeScalarDefault(message):
    # TODO(protobuf-team): This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return field.default_value
  return MakeScalarDefault
|
| 467 |
+
|
| 468 |
+
|
| 469 |
+
def _ReraiseTypeErrorWithFieldName(message_name, field_name):
|
| 470 |
+
"""Re-raise the currently-handled TypeError with the field name added."""
|
| 471 |
+
exc = sys.exc_info()[1]
|
| 472 |
+
if len(exc.args) == 1 and type(exc) is TypeError:
|
| 473 |
+
# simple TypeError; add field name to exception message
|
| 474 |
+
exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
|
| 475 |
+
|
| 476 |
+
# re-raise possibly-amended exception with original traceback:
|
| 477 |
+
raise exc.with_traceback(sys.exc_info()[2])
|
| 478 |
+
|
| 479 |
+
|
| 480 |
+
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""

  def _GetIntegerEnumValue(enum_type, value):
    """Convert a string or integer enum value to an integer.

    If the value is a string, it is converted to the enum value in
    enum_type with the same name.  If the value is not a string, it's
    returned as-is.  (No conversion or bounds-checking is done.)
    """
    if isinstance(value, str):
      try:
        return enum_type.values_by_name[value].number
      except KeyError:
        raise ValueError('Enum type %s: unknown label "%s"' % (
            enum_type.full_name, value))
    return value

  def init(self, **kwargs):
    # Byte-size caching state; dirty if any fields are provided at init.
    self._cached_byte_size = 0
    self._cached_byte_size_dirty = len(kwargs) > 0
    self._fields = {}
    # Contains a mapping from oneof field descriptors to the descriptor
    # of the currently set field in that oneof field.
    self._oneofs = {}

    # _unknown_fields is () when empty for efficiency, and will be turned into
    # a list if fields are added.
    self._unknown_fields = ()
    # _unknown_field_set is None when empty for efficiency, and will be
    # turned into UnknownFieldSet struct if fields are added.
    self._unknown_field_set = None      # pylint: disable=protected-access
    self._is_present_in_parent = False
    self._listener = message_listener_mod.NullMessageListener()
    self._listener_for_children = _Listener(self)
    for field_name, field_value in kwargs.items():
      field = _GetFieldByName(message_descriptor, field_name)
      # NOTE(review): _GetFieldByName raises ValueError rather than returning
      # None, so this TypeError branch looks unreachable — confirm upstream.
      if field is None:
        raise TypeError('%s() got an unexpected keyword argument "%s"' %
                        (message_descriptor.name, field_name))
      if field_value is None:
        # field=None is the same as no field at all.
        continue
      if field.label == _FieldDescriptor.LABEL_REPEATED:
        copy = field._default_constructor(self)
        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
          if _IsMapField(field):
            if _IsMessageMapField(field):
              # Message-valued maps: merge each value into a fresh entry.
              for key in field_value:
                copy[key].MergeFrom(field_value[key])
            else:
              copy.update(field_value)
          else:
            # Repeated submessages accept either dicts or message instances.
            for val in field_value:
              if isinstance(val, dict):
                copy.add(**val)
              else:
                copy.add().MergeFrom(val)
        else:  # Scalar
          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
            # Allow enum labels (strings) as well as raw numbers.
            field_value = [_GetIntegerEnumValue(field.enum_type, val)
                           for val in field_value]
          copy.extend(field_value)
        self._fields[field] = copy
      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        copy = field._default_constructor(self)
        new_val = field_value
        if isinstance(field_value, dict):
          # A dict initializes the submessage via its own constructor.
          new_val = field.message_type._concrete_class(**field_value)
        try:
          copy.MergeFrom(new_val)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
        self._fields[field] = copy
      else:
        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
        try:
          # Use the generated property so type checking and oneof state
          # updates happen as for a normal assignment.
          setattr(self, field_name, field_value)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)

  init.__module__ = None
  init.__doc__ = None
  cls.__init__ = init
|
| 565 |
+
|
| 566 |
+
|
| 567 |
+
def _GetFieldByName(message_descriptor, field_name):
|
| 568 |
+
"""Returns a field descriptor by field name.
|
| 569 |
+
|
| 570 |
+
Args:
|
| 571 |
+
message_descriptor: A Descriptor describing all fields in message.
|
| 572 |
+
field_name: The name of the field to retrieve.
|
| 573 |
+
Returns:
|
| 574 |
+
The field descriptor associated with the field name.
|
| 575 |
+
"""
|
| 576 |
+
try:
|
| 577 |
+
return message_descriptor.fields_by_name[field_name]
|
| 578 |
+
except KeyError:
|
| 579 |
+
raise ValueError('Protocol message %s has no "%s" field.' %
|
| 580 |
+
(message_descriptor.name, field_name))
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
def _AddPropertiesForFields(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  for field_descriptor in descriptor.fields:
    _AddPropertiesForField(field_descriptor, cls)

  if descriptor.is_extendable:
    # _ExtensionDict is a stateless adaptor, so a fresh instance is
    # allocated on every access of the Extensions property.
    cls.Extensions = property(lambda self: _ExtensionDict(self))
|
| 592 |
+
|
| 593 |
+
|
| 594 |
+
def _AddPropertiesForField(field, cls):
  """Adds a public property for a protocol message field.
  Clients can use this property to get and (in the case
  of non-repeated scalar fields) directly set the value
  of a protocol message field.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # Catch it if we add other types that we should
  # handle specially here.
  assert _FieldDescriptor.MAX_CPPTYPE == 10

  setattr(cls, field.name.upper() + '_FIELD_NUMBER', field.number)

  # Dispatch on field shape: repeated, composite, or plain scalar.
  if field.label == _FieldDescriptor.LABEL_REPEATED:
    _AddPropertiesForRepeatedField(field, cls)
    return
  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    _AddPropertiesForNonRepeatedCompositeField(field, cls)
    return
  _AddPropertiesForNonRepeatedScalarField(field, cls)
|
| 617 |
+
|
| 618 |
+
|
| 619 |
+
class _FieldProperty(property):
|
| 620 |
+
__slots__ = ('DESCRIPTOR',)
|
| 621 |
+
|
| 622 |
+
def __init__(self, descriptor, getter, setter, doc):
|
| 623 |
+
property.__init__(self, getter, setter, doc=doc)
|
| 624 |
+
self.DESCRIPTOR = descriptor
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
def _AddPropertiesForRepeatedField(field, cls):
  """Adds a public property for a "repeated" protocol message field.  Clients
  can use this property to get the value of the field, which will be either a
  RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see
  below).

  Note that when clients add values to these containers, we perform
  type-checking in the case of repeated scalar fields, and we also set any
  necessary "has" bits as a side-effect.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created.  If someone has preempted us, we
      # take that object and discard ours.
      # WARNING:  We are relying on setdefault() being atomic.  This is true
      #   in CPython but we haven't investigated others.  This warning appears
      #   in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % proto_field_name)

  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 669 |
+
|
| 670 |
+
|
| 671 |
+
def _AddPropertiesForNonRepeatedScalarField(field, cls):
  """Adds a public property for a nonrepeated, scalar protocol message field.
  Clients can use this property to get and directly set the value of the field.
  Note that when the client sets the value of a field by using this property,
  all necessary "has" bits are set as a side-effect, and we also perform
  type-checking.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)
  type_checker = type_checkers.GetTypeChecker(field)
  default_value = field.default_value
  is_proto3 = field.containing_type.syntax == 'proto3'

  def getter(self):
    # TODO(protobuf-team): This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return self._fields.get(field, default_value)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # In proto3, scalar fields outside a oneof have no presence: storing the
  # default value is equivalent to clearing the field.
  clear_when_set_to_default = is_proto3 and not field.containing_oneof

  def field_setter(self, new_value):
    # pylint: disable=protected-access
    # Testing the value for truthiness captures all of the proto3 defaults
    # (0, 0.0, enum 0, and False).
    try:
      new_value = type_checker.CheckValue(new_value)
    except TypeError as e:
      raise TypeError(
          'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
    if clear_when_set_to_default and not new_value:
      self._fields.pop(field, None)
    else:
      self._fields[field] = new_value
    # Check _cached_byte_size_dirty inline to improve performance, since scalar
    # setters are called frequently.
    if not self._cached_byte_size_dirty:
      self._Modified()

  if field.containing_oneof:
    # Oneof members additionally record which member of the oneof is set.
    def setter(self, new_value):
      field_setter(self, new_value)
      self._UpdateOneofState(field)
  else:
    setter = field_setter

  setter.__module__ = None
  setter.__doc__ = 'Setter for %s.' % proto_field_name

  # Add a property to encapsulate the getter/setter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 728 |
+
|
| 729 |
+
|
| 730 |
+
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
  """Adds a public property for a nonrepeated, composite protocol message field.
  A composite field is a "group" or "message" field.

  Clients can use this property to get the value of the field, but cannot
  assign to the property directly.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # TODO(robinson): Remove duplication with similar method
  # for non-repeated scalars.
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created.  If someone has preempted us, we
      # take that object and discard ours.
      # WARNING:  We are relying on setdefault() being atomic.  This is true
      #   in CPython but we haven't investigated others.  This warning appears
      #   in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to composite field '
                         '"%s" in protocol message object.' % proto_field_name)

  # Add a property to encapsulate the getter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
|
| 772 |
+
|
| 773 |
+
|
| 774 |
+
def _AddPropertiesForExtensions(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  extensions = descriptor.extensions_by_name
  for extension_name, extension_field in extensions.items():
    constant_name = extension_name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, extension_field.number)

  # TODO(amauryfa): Migrate all users of these attributes to functions like
  #   pool.FindExtensionByNumber(descriptor).
  if descriptor.file is not None:
    # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
    # These dicts are shared with (and kept up to date by) the pool.
    pool = descriptor.file.pool
    cls._extensions_by_number = pool._extensions_by_number[descriptor]
    cls._extensions_by_name = pool._extensions_by_name[descriptor]
|
| 788 |
+
|
| 789 |
+
def _AddStaticMethods(cls):
  # TODO(robinson): This probably needs to be thread-safe(?)
  def RegisterExtension(extension_handle):
    extension_handle.containing_type = cls.DESCRIPTOR
    # TODO(amauryfa): Use cls.MESSAGE_FACTORY.pool when available.
    # pylint: disable=protected-access
    cls.DESCRIPTOR.file.pool._AddExtensionDescriptor(extension_handle)
    _AttachFieldHelpers(cls, extension_handle)
  cls.RegisterExtension = staticmethod(RegisterExtension)

  def FromString(s):
    # Parse a new message instance directly from serialized bytes.
    message = cls()
    message.MergeFromString(s)
    return message
  cls.FromString = staticmethod(FromString)
|
| 804 |
+
|
| 805 |
+
|
| 806 |
+
def _IsPresent(item):
  """Given a (FieldDescriptor, value) tuple from _fields, return true if the
  value should be included in the list returned by ListFields()."""
  field_descriptor, value = item
  if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
    # A repeated field is present only when it holds at least one element.
    return bool(value)
  if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # A submessage is present only if it was explicitly set or mutated.
    return value._is_present_in_parent
  return True
|
| 816 |
+
|
| 817 |
+
|
| 818 |
+
def _AddListFieldsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def ListFields(self):
    # Keep only the fields that are actually present, ordered by field number.
    present_items = filter(_IsPresent, self._fields.items())
    return sorted(present_items, key=lambda item: item[0].number)

  cls.ListFields = ListFields
|
| 827 |
+
|
| 828 |
+
# Error messages used by HasField() when a field has no notion of presence.
_PROTO3_ERROR_TEMPLATE = \
  ('Protocol message %s has no non-repeated submessage field "%s" '
   'nor marked as optional')
_PROTO2_ERROR_TEMPLATE = 'Protocol message %s has no non-repeated field "%s"'
|
| 832 |
+
|
| 833 |
+
def _AddHasFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  is_proto3 = (message_descriptor.syntax == "proto3")
  error_msg = _PROTO3_ERROR_TEMPLATE if is_proto3 else _PROTO2_ERROR_TEMPLATE

  # Precompute the set of names HasField() accepts for this message type.
  hassable_fields = {}
  for field in message_descriptor.fields:
    if field.label == _FieldDescriptor.LABEL_REPEATED:
      continue
    # For proto3, only submessages and fields inside a oneof have presence.
    if (is_proto3 and field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE and
        not field.containing_oneof):
      continue
    hassable_fields[field.name] = field

  # Has methods are supported for oneof descriptors.
  for oneof in message_descriptor.oneofs:
    hassable_fields[oneof.name] = oneof

  def HasField(self, field_name):
    try:
      field = hassable_fields[field_name]
    except KeyError:
      raise ValueError(error_msg % (message_descriptor.full_name, field_name))

    if isinstance(field, descriptor_mod.OneofDescriptor):
      # HasField on a oneof name means "is any member of the oneof set?";
      # delegate to the currently-set member if there is one.
      try:
        return HasField(self, self._oneofs[field].name)
      except KeyError:
        return False
    else:
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        value = self._fields.get(field)
        return value is not None and value._is_present_in_parent
      else:
        return field in self._fields

  cls.HasField = HasField
|
| 872 |
+
|
| 873 |
+
|
| 874 |
+
def _AddClearFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""
  def ClearField(self, field_name):
    try:
      field = message_descriptor.fields_by_name[field_name]
    except KeyError:
      # Not a plain field; a oneof name clears whichever member is set.
      try:
        field = message_descriptor.oneofs_by_name[field_name]
        if field in self._oneofs:
          field = self._oneofs[field]
        else:
          return
      except KeyError:
        raise ValueError('Protocol message %s has no "%s" field.' %
                         (message_descriptor.name, field_name))

    if field in self._fields:
      # To match the C++ implementation, we need to invalidate iterators
      # for map fields when ClearField() happens.
      if hasattr(self._fields[field], 'InvalidateIterators'):
        self._fields[field].InvalidateIterators()

      # Note:  If the field is a sub-message, its listener will still point
      #   at us.  That's fine, because the worst than can happen is that it
      #   will call _Modified() and invalidate our byte size.  Big deal.
      del self._fields[field]

      # Clearing the currently-set member of a oneof also clears the oneof.
      if self._oneofs.get(field.containing_oneof, None) is field:
        del self._oneofs[field.containing_oneof]

    # Always call _Modified() -- even if nothing was changed, this is
    # a mutating method, and thus calling it should cause the field to become
    # present in the parent message.
    self._Modified()

  cls.ClearField = ClearField
|
| 910 |
+
|
| 911 |
+
|
| 912 |
+
def _AddClearExtensionMethod(cls):
  """Helper for _AddMessageMethods()."""
  def ClearExtension(self, extension_handle):
    extension_dict._VerifyExtensionHandle(self, extension_handle)

    # Similar to ClearField(), above: drop the stored value if any, then
    # always mark the message as modified.
    self._fields.pop(extension_handle, None)
    self._Modified()
  cls.ClearExtension = ClearExtension
|
| 922 |
+
|
| 923 |
+
|
| 924 |
+
def _AddHasExtensionMethod(cls):
  """Helper for _AddMessageMethods()."""
  def HasExtension(self, extension_handle):
    extension_dict._VerifyExtensionHandle(self, extension_handle)
    # Presence is undefined for repeated extensions, as for repeated fields.
    if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
      raise KeyError('"%s" is repeated.' % extension_handle.full_name)

    if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      value = self._fields.get(extension_handle)
      return value is not None and value._is_present_in_parent
    else:
      return extension_handle in self._fields
  cls.HasExtension = HasExtension
|
| 937 |
+
|
| 938 |
+
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which takes
  the target message as argument. _InternalUnpackAny method does not have
  target message type and need to find the message type in descriptor pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message, or None if the type_url is empty or unresolvable.
  """
  # TODO(amauryfa): Don't use the factory of generated messages.
  # To make Any work with custom factories, use the message factory of the
  # parent message.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import symbol_database
  factory = symbol_database.Default()

  type_url = msg.type_url

  if not type_url:
    return None

  # TODO(haberman): For now we just strip the hostname.  Better logic will be
  # required.
  type_name = type_url.split('/')[-1]
  descriptor = factory.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  # Build a concrete instance of the resolved type and parse the payload.
  message_class = factory.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
|
| 976 |
+
|
| 977 |
+
|
| 978 |
+
def _AddEqualsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""
  def __eq__(self, other):
    # Messages of different types (or non-messages) are never equal.
    if (not isinstance(other, message_mod.Message) or
        other.DESCRIPTOR != self.DESCRIPTOR):
      return False

    if self is other:
      return True

    # Two Any messages compare equal if their unpacked payloads are equal,
    # even if the serialized bytes differ.
    if self.DESCRIPTOR.full_name == _AnyFullTypeName:
      any_a = _InternalUnpackAny(self)
      any_b = _InternalUnpackAny(other)
      if any_a and any_b:
        return any_a == any_b

    if not self.ListFields() == other.ListFields():
      return False

    # TODO(jieluo): Fix UnknownFieldSet to consider MessageSet extensions,
    # then use it for the comparison.
    unknown_fields = list(self._unknown_fields)
    unknown_fields.sort()
    other_unknown_fields = list(other._unknown_fields)
    other_unknown_fields.sort()
    return unknown_fields == other_unknown_fields

  cls.__eq__ = __eq__
|
| 1006 |
+
|
| 1007 |
+
|
| 1008 |
+
def _AddStrMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""
  def __str__(self):
    # Render the message in protobuf text format.
    return text_format.MessageToString(self)
  cls.__str__ = __str__
|
| 1013 |
+
|
| 1014 |
+
|
| 1015 |
+
def _AddReprMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""
  def __repr__(self):
    # repr() intentionally matches str(): protobuf text format.
    return text_format.MessageToString(self)
  cls.__repr__ = __repr__
|
| 1020 |
+
|
| 1021 |
+
|
| 1022 |
+
def _AddUnicodeMethod(unused_message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def __unicode__(self):
    # Legacy Python 2 hook; presumably MessageToString(as_utf8=True) returns
    # bytes there so .decode('utf-8') applies — not exercised on Python 3.
    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
  cls.__unicode__ = __unicode__
|
| 1028 |
+
|
| 1029 |
+
|
| 1030 |
+
def _BytesForNonRepeatedElement(value, field_number, field_type):
  """Returns the number of bytes needed to serialize a non-repeated element.
  The returned byte count includes space for tag information and any
  other additional space associated with serializing value.

  Args:
    value: Value we're serializing.
    field_number: Field number of this value.  (Since the field number
      is stored as part of a varint-encoded tag, this has an impact
      on the total bytes required to serialize the value).
    field_type: The type of the field.  One of the TYPE_* constants
      within FieldDescriptor.

  Raises:
    message_mod.EncodeError: if field_type is not a recognized TYPE_*
      constant.
  """
  try:
    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
  except KeyError:
    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
  # Invoke the sizer outside the try block so a KeyError raised inside the
  # sizer itself is not misreported as an unrecognized field type.
  return fn(field_number, value)
|
| 1048 |
+
|
| 1049 |
+
|
| 1050 |
+
def _AddByteSizeMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def ByteSize(self):
    # Fast path: reuse the cached size while nothing has been modified.
    if not self._cached_byte_size_dirty:
      return self._cached_byte_size

    size = 0
    descriptor = self.DESCRIPTOR
    if descriptor.GetOptions().map_entry:
      # Fields of map entry should always be serialized.
      size = descriptor.fields_by_name['key']._sizer(self.key)
      size += descriptor.fields_by_name['value']._sizer(self.value)
    else:
      for field_descriptor, field_value in self.ListFields():
        size += field_descriptor._sizer(field_value)
      # Unknown fields are stored pre-encoded, so their size is just the
      # length of the raw tag and value bytes.
      for tag_bytes, value_bytes in self._unknown_fields:
        size += len(tag_bytes) + len(value_bytes)

    self._cached_byte_size = size
    self._cached_byte_size_dirty = False
    self._listener_for_children.dirty = False
    return size

  cls.ByteSize = ByteSize
|
| 1075 |
+
|
| 1076 |
+
|
| 1077 |
+
def _AddSerializeToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def SerializeToString(self, **kwargs):
    # Check if the message has all of its required fields set.
    if not self.IsInitialized():
      raise message_mod.EncodeError(
          'Message %s is missing required fields: %s' % (
          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
    # Delegate the actual encoding to the partial serializer.
    return self.SerializePartialToString(**kwargs)
  cls.SerializeToString = SerializeToString
|
| 1088 |
+
|
| 1089 |
+
|
| 1090 |
+
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods()."""

  def SerializePartialToString(self, **kwargs):
    # Serialize without checking required-field initialization.
    out = BytesIO()
    self._InternalSerialize(out.write, **kwargs)
    return out.getvalue()
  cls.SerializePartialToString = SerializePartialToString

  def InternalSerialize(self, write_bytes, deterministic=None):
    if deterministic is None:
      # Fall back to the process-wide default for deterministic serialization.
      deterministic = (
          api_implementation.IsPythonDefaultSerializationDeterministic())
    else:
      deterministic = bool(deterministic)

    descriptor = self.DESCRIPTOR
    if descriptor.GetOptions().map_entry:
      # Fields of map entry should always be serialized.
      descriptor.fields_by_name['key']._encoder(
          write_bytes, self.key, deterministic)
      descriptor.fields_by_name['value']._encoder(
          write_bytes, self.value, deterministic)
    else:
      for field_descriptor, field_value in self.ListFields():
        field_descriptor._encoder(write_bytes, field_value, deterministic)
      # Unknown fields are emitted verbatim as their original raw bytes.
      for tag_bytes, value_bytes in self._unknown_fields:
        write_bytes(tag_bytes)
        write_bytes(value_bytes)
  cls._InternalSerialize = InternalSerialize
|
| 1120 |
+
|
| 1121 |
+
|
| 1122 |
+
def _AddMergeFromStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Attaches MergeFromString() and the internal _InternalParse() wire parser
  to the generated message class ``cls``.
  """
  def MergeFromString(self, serialized):
    # Parses a bytes-like object into this message, merging with fields
    # already set.  Returns the number of bytes consumed.
    serialized = memoryview(serialized)
    length = len(serialized)
    try:
      if self._InternalParse(serialized, 0, length) != length:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise message_mod.DecodeError('Unexpected end-group tag.')
    except (IndexError, TypeError):
      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
      raise message_mod.DecodeError('Truncated message.')
    except struct.error as e:
      raise message_mod.DecodeError(e)
    return length   # Return this for legacy reasons.
  cls.MergeFromString = MergeFromString

  # Bind frequently called helpers to locals so the hot parse loop below
  # uses fast LOAD_FAST lookups instead of module-attribute lookups.
  local_ReadTag = decoder.ReadTag
  local_SkipField = decoder.SkipField
  decoders_by_tag = cls._decoders_by_tag

  def InternalParse(self, buffer, pos, end):
    """Create a message from serialized bytes.

    Args:
      self: Message, instance of the proto message object.
      buffer: memoryview of the serialized data.
      pos: int, position to start in the serialized data.
      end: int, end position of the serialized data.

    Returns:
      Message object.
    """
    # Guard against internal misuse, since this function is called internally
    # quite extensively, and its easy to accidentally pass bytes.
    assert isinstance(buffer, memoryview)
    self._Modified()
    field_dict = self._fields
    # pylint: disable=protected-access
    unknown_field_set = self._unknown_field_set
    while pos != end:
      (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
      field_decoder, field_desc = decoders_by_tag.get(tag_bytes, (None, None))
      if field_decoder is None:
        # Unknown field: record it both in the legacy _unknown_fields list
        # and in the structured UnknownFieldSet.
        if not self._unknown_fields:  # pylint: disable=protected-access
          self._unknown_fields = []  # pylint: disable=protected-access
        if unknown_field_set is None:
          # pylint: disable=protected-access
          self._unknown_field_set = containers.UnknownFieldSet()
          # pylint: disable=protected-access
          unknown_field_set = self._unknown_field_set
        # pylint: disable=protected-access
        (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
        field_number, wire_type = wire_format.UnpackTag(tag)
        if field_number == 0:
          raise message_mod.DecodeError('Field number 0 is illegal.')
        # TODO(jieluo): remove old_pos.
        old_pos = new_pos
        (data, new_pos) = decoder._DecodeUnknownField(
            buffer, new_pos, wire_type)  # pylint: disable=protected-access
        if new_pos == -1:
          return pos
        # pylint: disable=protected-access
        unknown_field_set._add(field_number, wire_type, data)
        # TODO(jieluo): remove _unknown_fields.
        new_pos = local_SkipField(buffer, old_pos, end, tag_bytes)
        if new_pos == -1:
          return pos
        self._unknown_fields.append(
            (tag_bytes, buffer[old_pos:new_pos].tobytes()))
        pos = new_pos
      else:
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        if field_desc:
          self._UpdateOneofState(field_desc)
    return pos
  cls._InternalParse = InternalParse
|
| 1200 |
+
|
| 1201 |
+
|
| 1202 |
+
def _AddIsInitializedMethod(message_descriptor, cls):
  """Adds the IsInitialized and FindInitializationError methods to the
  protocol message class."""

  # Precompute the required (proto2 'required' label) fields once per class.
  required_fields = [field for field in message_descriptor.fields
                     if field.label == _FieldDescriptor.LABEL_REQUIRED]

  def IsInitialized(self, errors=None):
    """Checks if all required fields of a message are set.

    Args:
      errors:  A list which, if provided, will be populated with the field
               paths of all missing required fields.

    Returns:
      True iff the specified message has all required fields set.
    """

    # Performance is critical so we avoid HasField() and ListFields().

    for field in required_fields:
      # A required message-typed field also counts as missing when its
      # submessage was never actually set on the parent.
      if (field not in self._fields or
          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
           not self._fields[field]._is_present_in_parent)):
        if errors is not None:
          errors.extend(self.FindInitializationErrors())
        return False

    for field, value in list(self._fields.items()):  # dict can change size!
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.label == _FieldDescriptor.LABEL_REPEATED:
          if (field.message_type.has_options and
              field.message_type.GetOptions().map_entry):
            continue
          for element in value:
            if not element.IsInitialized():
              if errors is not None:
                errors.extend(self.FindInitializationErrors())
              return False
        elif value._is_present_in_parent and not value.IsInitialized():
          if errors is not None:
            errors.extend(self.FindInitializationErrors())
          return False

    return True

  cls.IsInitialized = IsInitialized

  def FindInitializationErrors(self):
    """Finds required fields which are not initialized.

    Returns:
      A list of strings.  Each string is a path to an uninitialized field from
      the top-level message, e.g. "foo.bar[5].baz".
    """

    errors = []  # simplify things

    for field in required_fields:
      if not self.HasField(field.name):
        errors.append(field.name)

    for field, value in self.ListFields():
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.is_extension:
          name = '(%s)' % field.full_name
        else:
          name = field.name

        if _IsMapField(field):
          if _IsMessageMapField(field):
            for key in value:
              element = value[key]
              prefix = '%s[%s].' % (name, key)
              sub_errors = element.FindInitializationErrors()
              errors += [prefix + error for error in sub_errors]
          else:
            # ScalarMaps can't have any initialization errors.
            pass
        elif field.label == _FieldDescriptor.LABEL_REPEATED:
          for i in range(len(value)):
            element = value[i]
            prefix = '%s[%d].' % (name, i)
            sub_errors = element.FindInitializationErrors()
            errors += [prefix + error for error in sub_errors]
        else:
          prefix = name + '.'
          sub_errors = value.FindInitializationErrors()
          errors += [prefix + error for error in sub_errors]

    return errors

  cls.FindInitializationErrors = FindInitializationErrors
|
| 1295 |
+
|
| 1296 |
+
|
| 1297 |
+
def _FullyQualifiedClassName(klass):
|
| 1298 |
+
module = klass.__module__
|
| 1299 |
+
name = getattr(klass, '__qualname__', klass.__name__)
|
| 1300 |
+
if module in (None, 'builtins', '__builtin__'):
|
| 1301 |
+
return name
|
| 1302 |
+
return module + '.' + name
|
| 1303 |
+
|
| 1304 |
+
|
| 1305 |
+
def _AddMergeFromMethod(cls):
  """Helper for _AddMessageMethods(): attaches MergeFrom() to cls."""
  LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE

  def MergeFrom(self, msg):
    # Merges the contents of msg into self: repeated fields are appended,
    # submessages are merged recursively, and scalars are overwritten.
    if not isinstance(msg, cls):
      raise TypeError(
          'Parameter to MergeFrom() must be instance of same class: '
          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
                                   _FullyQualifiedClassName(msg.__class__)))

    assert msg is not self
    self._Modified()

    fields = self._fields

    for field, value in msg._fields.items():
      if field.label == LABEL_REPEATED:
        field_value = fields.get(field)
        if field_value is None:
          # Construct a new object to represent this field.
          field_value = field._default_constructor(self)
          fields[field] = field_value
        field_value.MergeFrom(value)
      elif field.cpp_type == CPPTYPE_MESSAGE:
        # Only merge submessages that were actually set on the source.
        if value._is_present_in_parent:
          field_value = fields.get(field)
          if field_value is None:
            # Construct a new object to represent this field.
            field_value = field._default_constructor(self)
            fields[field] = field_value
          field_value.MergeFrom(value)
      else:
        self._fields[field] = value
        if field.containing_oneof:
          self._UpdateOneofState(field)

    if msg._unknown_fields:
      if not self._unknown_fields:
        self._unknown_fields = []
      self._unknown_fields.extend(msg._unknown_fields)
      # pylint: disable=protected-access
      if self._unknown_field_set is None:
        self._unknown_field_set = containers.UnknownFieldSet()
      self._unknown_field_set._extend(msg._unknown_field_set)

  cls.MergeFrom = MergeFrom
|
| 1352 |
+
|
| 1353 |
+
|
| 1354 |
+
def _AddWhichOneofMethod(message_descriptor, cls):
|
| 1355 |
+
def WhichOneof(self, oneof_name):
|
| 1356 |
+
"""Returns the name of the currently set field inside a oneof, or None."""
|
| 1357 |
+
try:
|
| 1358 |
+
field = message_descriptor.oneofs_by_name[oneof_name]
|
| 1359 |
+
except KeyError:
|
| 1360 |
+
raise ValueError(
|
| 1361 |
+
'Protocol message has no oneof "%s" field.' % oneof_name)
|
| 1362 |
+
|
| 1363 |
+
nested_field = self._oneofs.get(field, None)
|
| 1364 |
+
if nested_field is not None and self.HasField(nested_field.name):
|
| 1365 |
+
return nested_field.name
|
| 1366 |
+
else:
|
| 1367 |
+
return None
|
| 1368 |
+
|
| 1369 |
+
cls.WhichOneof = WhichOneof
|
| 1370 |
+
|
| 1371 |
+
|
| 1372 |
+
def _Clear(self):
|
| 1373 |
+
# Clear fields.
|
| 1374 |
+
self._fields = {}
|
| 1375 |
+
self._unknown_fields = ()
|
| 1376 |
+
# pylint: disable=protected-access
|
| 1377 |
+
if self._unknown_field_set is not None:
|
| 1378 |
+
self._unknown_field_set._clear()
|
| 1379 |
+
self._unknown_field_set = None
|
| 1380 |
+
|
| 1381 |
+
self._oneofs = {}
|
| 1382 |
+
self._Modified()
|
| 1383 |
+
|
| 1384 |
+
|
| 1385 |
+
def _UnknownFields(self):
|
| 1386 |
+
if self._unknown_field_set is None: # pylint: disable=protected-access
|
| 1387 |
+
# pylint: disable=protected-access
|
| 1388 |
+
self._unknown_field_set = containers.UnknownFieldSet()
|
| 1389 |
+
return self._unknown_field_set # pylint: disable=protected-access
|
| 1390 |
+
|
| 1391 |
+
|
| 1392 |
+
def _DiscardUnknownFields(self):
|
| 1393 |
+
self._unknown_fields = []
|
| 1394 |
+
self._unknown_field_set = None # pylint: disable=protected-access
|
| 1395 |
+
for field, value in self.ListFields():
|
| 1396 |
+
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
|
| 1397 |
+
if _IsMapField(field):
|
| 1398 |
+
if _IsMessageMapField(field):
|
| 1399 |
+
for key in value:
|
| 1400 |
+
value[key].DiscardUnknownFields()
|
| 1401 |
+
elif field.label == _FieldDescriptor.LABEL_REPEATED:
|
| 1402 |
+
for sub_message in value:
|
| 1403 |
+
sub_message.DiscardUnknownFields()
|
| 1404 |
+
else:
|
| 1405 |
+
value.DiscardUnknownFields()
|
| 1406 |
+
|
| 1407 |
+
|
| 1408 |
+
def _SetListener(self, listener):
|
| 1409 |
+
if listener is None:
|
| 1410 |
+
self._listener = message_listener_mod.NullMessageListener()
|
| 1411 |
+
else:
|
| 1412 |
+
self._listener = listener
|
| 1413 |
+
|
| 1414 |
+
|
| 1415 |
+
def _AddMessageMethods(message_descriptor, cls):
  """Adds implementations of all Message methods to cls."""
  _AddListFieldsMethod(message_descriptor, cls)
  _AddHasFieldMethod(message_descriptor, cls)
  _AddClearFieldMethod(message_descriptor, cls)
  # Extension accessors only make sense on extendable (proto2) messages.
  if message_descriptor.is_extendable:
    _AddClearExtensionMethod(cls)
    _AddHasExtensionMethod(cls)
  _AddEqualsMethod(message_descriptor, cls)
  _AddStrMethod(message_descriptor, cls)
  _AddReprMethod(message_descriptor, cls)
  _AddUnicodeMethod(message_descriptor, cls)
  _AddByteSizeMethod(message_descriptor, cls)
  _AddSerializeToStringMethod(message_descriptor, cls)
  _AddSerializePartialToStringMethod(message_descriptor, cls)
  _AddMergeFromStringMethod(message_descriptor, cls)
  _AddIsInitializedMethod(message_descriptor, cls)
  _AddMergeFromMethod(cls)
  _AddWhichOneofMethod(message_descriptor, cls)
  # Adds methods which do not depend on cls.
  cls.Clear = _Clear
  cls.UnknownFields = _UnknownFields
  cls.DiscardUnknownFields = _DiscardUnknownFields
  cls._SetListener = _SetListener
|
| 1439 |
+
|
| 1440 |
+
|
| 1441 |
+
def _AddPrivateHelperMethods(message_descriptor, cls):
|
| 1442 |
+
"""Adds implementation of private helper methods to cls."""
|
| 1443 |
+
|
| 1444 |
+
def Modified(self):
|
| 1445 |
+
"""Sets the _cached_byte_size_dirty bit to true,
|
| 1446 |
+
and propagates this to our listener iff this was a state change.
|
| 1447 |
+
"""
|
| 1448 |
+
|
| 1449 |
+
# Note: Some callers check _cached_byte_size_dirty before calling
|
| 1450 |
+
# _Modified() as an extra optimization. So, if this method is ever
|
| 1451 |
+
# changed such that it does stuff even when _cached_byte_size_dirty is
|
| 1452 |
+
# already true, the callers need to be updated.
|
| 1453 |
+
if not self._cached_byte_size_dirty:
|
| 1454 |
+
self._cached_byte_size_dirty = True
|
| 1455 |
+
self._listener_for_children.dirty = True
|
| 1456 |
+
self._is_present_in_parent = True
|
| 1457 |
+
self._listener.Modified()
|
| 1458 |
+
|
| 1459 |
+
def _UpdateOneofState(self, field):
|
| 1460 |
+
"""Sets field as the active field in its containing oneof.
|
| 1461 |
+
|
| 1462 |
+
Will also delete currently active field in the oneof, if it is different
|
| 1463 |
+
from the argument. Does not mark the message as modified.
|
| 1464 |
+
"""
|
| 1465 |
+
other_field = self._oneofs.setdefault(field.containing_oneof, field)
|
| 1466 |
+
if other_field is not field:
|
| 1467 |
+
del self._fields[other_field]
|
| 1468 |
+
self._oneofs[field.containing_oneof] = field
|
| 1469 |
+
|
| 1470 |
+
cls._Modified = Modified
|
| 1471 |
+
cls.SetInParent = Modified
|
| 1472 |
+
cls._UpdateOneofState = _UpdateOneofState
|
| 1473 |
+
|
| 1474 |
+
|
| 1475 |
+
class _Listener(object):
|
| 1476 |
+
|
| 1477 |
+
"""MessageListener implementation that a parent message registers with its
|
| 1478 |
+
child message.
|
| 1479 |
+
|
| 1480 |
+
In order to support semantics like:
|
| 1481 |
+
|
| 1482 |
+
foo.bar.baz.qux = 23
|
| 1483 |
+
assert foo.HasField('bar')
|
| 1484 |
+
|
| 1485 |
+
...child objects must have back references to their parents.
|
| 1486 |
+
This helper class is at the heart of this support.
|
| 1487 |
+
"""
|
| 1488 |
+
|
| 1489 |
+
def __init__(self, parent_message):
|
| 1490 |
+
"""Args:
|
| 1491 |
+
parent_message: The message whose _Modified() method we should call when
|
| 1492 |
+
we receive Modified() messages.
|
| 1493 |
+
"""
|
| 1494 |
+
# This listener establishes a back reference from a child (contained) object
|
| 1495 |
+
# to its parent (containing) object. We make this a weak reference to avoid
|
| 1496 |
+
# creating cyclic garbage when the client finishes with the 'parent' object
|
| 1497 |
+
# in the tree.
|
| 1498 |
+
if isinstance(parent_message, weakref.ProxyType):
|
| 1499 |
+
self._parent_message_weakref = parent_message
|
| 1500 |
+
else:
|
| 1501 |
+
self._parent_message_weakref = weakref.proxy(parent_message)
|
| 1502 |
+
|
| 1503 |
+
# As an optimization, we also indicate directly on the listener whether
|
| 1504 |
+
# or not the parent message is dirty. This way we can avoid traversing
|
| 1505 |
+
# up the tree in the common case.
|
| 1506 |
+
self.dirty = False
|
| 1507 |
+
|
| 1508 |
+
def Modified(self):
|
| 1509 |
+
if self.dirty:
|
| 1510 |
+
return
|
| 1511 |
+
try:
|
| 1512 |
+
# Propagate the signal to our parents iff this is the first field set.
|
| 1513 |
+
self._parent_message_weakref._Modified()
|
| 1514 |
+
except ReferenceError:
|
| 1515 |
+
# We can get here if a client has kept a reference to a child object,
|
| 1516 |
+
# and is now setting a field on it, but the child's parent has been
|
| 1517 |
+
# garbage-collected. This is not an error.
|
| 1518 |
+
pass
|
| 1519 |
+
|
| 1520 |
+
|
| 1521 |
+
class _OneofListener(_Listener):
  """Special listener implementation for setting composite oneof fields."""

  def __init__(self, parent_message, field):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
      field: The descriptor of the field being set in the parent message.
    """
    super().__init__(parent_message)
    self._field = field

  def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
      # Record this field as the active member of its oneof, then do the
      # ordinary dirty-propagation.
      self._parent_message_weakref._UpdateOneofState(self._field)
      super().Modified()
    except ReferenceError:
      # Parent already garbage-collected; nothing to update.
      pass
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/type_checkers.py
ADDED
|
@@ -0,0 +1,435 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Provides type checking routines.
|
| 32 |
+
|
| 33 |
+
This module defines type checking utilities in the forms of dictionaries:
|
| 34 |
+
|
| 35 |
+
VALUE_CHECKERS: A dictionary of field types and a value validation object.
|
| 36 |
+
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
|
| 37 |
+
function.
|
| 38 |
+
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
|
| 39 |
+
function.
|
| 40 |
+
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
|
| 41 |
+
corresponding wire types.
|
| 42 |
+
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
|
| 43 |
+
function.
|
| 44 |
+
"""
|
| 45 |
+
|
| 46 |
+
__author__ = 'robinson@google.com (Will Robinson)'
|
| 47 |
+
|
| 48 |
+
import ctypes
|
| 49 |
+
import numbers
|
| 50 |
+
|
| 51 |
+
from google.protobuf.internal import decoder
|
| 52 |
+
from google.protobuf.internal import encoder
|
| 53 |
+
from google.protobuf.internal import wire_format
|
| 54 |
+
from google.protobuf import descriptor
|
| 55 |
+
|
| 56 |
+
_FieldDescriptor = descriptor.FieldDescriptor
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def TruncateToFourByteFloat(original):
  """Return *original* rounded to the nearest IEEE-754 single precision value."""
  truncated = ctypes.c_float(original)
  return truncated.value
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def ToShortestFloat(original):
  """Returns the shortest float that has same value in wire."""
  # Every 4-byte float needs between 6 and 9 significant digits, so begin at
  # 6 and widen until the printed form survives a round trip through a
  # 4-byte float.  Applying '.9g' directly would keep representation noise
  # for most values, e.g. float_field=0.9 would print as 0.899999976.
  precision = 6
  while True:
    shortened = float('{0:.{1}g}'.format(original, precision))
    if TruncateToFourByteFloat(shortened) == original:
      return shortened
    precision += 1
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def SupportsOpenEnums(field_descriptor):
  """True iff the field's containing message uses proto3 (open) enum rules."""
  containing = field_descriptor.containing_type
  return containing.syntax == 'proto3'
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def GetTypeChecker(field):
  """Returns a type checker for a message field of the specified types.

  Args:
    field: FieldDescriptor object for this field.

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  is_proto_string = (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
                     field.type == _FieldDescriptor.TYPE_STRING)
  if is_proto_string:
    return UnicodeValueChecker()
  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
    if not SupportsOpenEnums(field):
      return EnumValueChecker(field.enum_type)
    # When open enums are supported, any int32 can be assigned.
    return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
  return _VALUE_CHECKERS[field.cpp_type]
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
# None of the typecheckers below make any attempt to guard against people
|
| 105 |
+
# subclassing builtin types and doing weird things. We're not trying to
|
| 106 |
+
# protect against malicious clients here, just people accidentally shooting
|
| 107 |
+
# themselves in the foot in obvious ways.
|
| 108 |
+
# None of the typecheckers below make any attempt to guard against people
# subclassing builtin types and doing weird things.  We're not trying to
# protect against malicious clients here, just people accidentally shooting
# themselves in the foot in obvious ways.
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if isinstance(proposed_value, self._acceptable_types):
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class TypeCheckerWithDefault(TypeChecker):
  """A TypeChecker that additionally carries the field's default value."""

  def __init__(self, default_value, *acceptable_types):
    super().__init__(*acceptable_types)
    self._default_value = default_value

  def DefaultValue(self):
    """Return the default value supplied at construction time."""
    return self._default_value
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class BoolValueChecker(object):
  """Type checker used for bool fields."""

  def CheckValue(self, proposed_value):
    """Accept any integer-indexable value (bool, int, ...) and return bool(value).

    numpy ndarrays are rejected explicitly even though they expose
    __index__, matching the behavior of the other scalar checkers.
    """
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    if is_ndarray or not hasattr(proposed_value, '__index__'):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (bool, int)))
    return bool(proposed_value)

  def DefaultValue(self):
    return False
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
# IntValueChecker and its subclasses perform integer type-checks
|
| 156 |
+
# and bounds-checks.
|
| 157 |
+
# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):

  """Checker used for integer fields.  Performs type-check and range check.

  Subclasses supply the inclusive bounds via _MIN and _MAX class attributes.
  """

  def CheckValue(self, proposed_value):
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    if is_ndarray or not hasattr(proposed_value, '__index__'):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (int,)))

    if not self._MIN <= int(proposed_value) <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    # We force all values to int to make alternate implementations where the
    # distinction is more significant (e.g. the C++ implementation) simpler.
    return int(proposed_value)

  def DefaultValue(self):
    return 0
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
class EnumValueChecker(object):

  """Checker used for enum fields.  Performs type-check and range check.

  Only values declared in the enum descriptor are accepted (closed /
  proto2 enum semantics).
  """

  def __init__(self, enum_type):
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, numbers.Integral):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (int,)))
    if int(proposed_value) not in self._enum_type.values_by_number:
      raise ValueError('Unknown enum value: %d' % proposed_value)
    return proposed_value

  def DefaultValue(self):
    # The first declared enum value is the default.
    return self._enum_type.values[0].number
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
class UnicodeValueChecker(object):

  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, (bytes, str)):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (bytes, str)))

    if isinstance(proposed_value, bytes):
      # bytes input must be valid UTF-8 so we can normalize it to str.
      try:
        return proposed_value.decode('utf-8')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
                         'encoding. Non-UTF-8 strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))

    # str input: verify it is encodable (rejects lone surrogates).
    try:
      proposed_value.encode('utf8')
    except UnicodeEncodeError:
      raise ValueError('%.1024r isn\'t a valid unicode string and '
                       'can\'t be encoded in UTF-8.'%
                       (proposed_value))
    return proposed_value

  def DefaultValue(self):
    return u""
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
class Int32ValueChecker(IntValueChecker):
  # Inclusive bounds of a signed 32-bit integer; plain ints are used since
  # comparison on them may be more efficient.
  _MIN = -(1 << 31)
  _MAX = (1 << 31) - 1
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
class Uint32ValueChecker(IntValueChecker):
  # Inclusive bounds of an unsigned 32-bit integer.
  _MIN = 0
  _MAX = 0xFFFFFFFF
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class Int64ValueChecker(IntValueChecker):
  # Inclusive bounds of a signed 64-bit integer.
  _MIN = -0x8000000000000000
  _MAX = 0x7FFFFFFFFFFFFFFF
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
class Uint64ValueChecker(IntValueChecker):
  # Inclusive bounds of an unsigned 64-bit integer.
  _MIN = 0
  _MAX = 0xFFFFFFFFFFFFFFFF
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
# The max 4 bytes float is about 3.4028234663852886e+38
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')  # largest finite binary32 value
_FLOAT_MIN = -_FLOAT_MAX                       # most negative finite binary32 value
_INF = float('inf')
_NEG_INF = float('-inf')
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
class DoubleValueChecker(object):
  """Checker used for double fields.

  Performs type-check and range check.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    convertible = (hasattr(proposed_value, '__float__') or
                   hasattr(proposed_value, '__index__'))
    if is_ndarray or not convertible:
      raise TypeError('%.1024r has type %s, but expected one of: int, float' %
                      (proposed_value, value_type))
    return float(proposed_value)

  def DefaultValue(self):
    return 0.0
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
class FloatValueChecker(DoubleValueChecker):
  """Checker used for float fields.

  Performs type-check and range check.

  Values exceeding a 32-bit float will be converted to inf/-inf.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    as_double = super().CheckValue(proposed_value)
    # This inf rounding matches the C++ proto SafeDoubleToFloat logic:
    # doubles beyond the float32 range saturate to infinity, they never raise.
    if as_double > _FLOAT_MAX:
      return _INF
    if as_double < _FLOAT_MIN:
      return _NEG_INF
    return TruncateToFourByteFloat(as_double)
|
| 304 |
+
|
| 305 |
+
# Type-checkers for all scalar CPPTYPEs.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
    _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
    _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
    # CPPTYPE_STRING here maps to the bytes checker; NOTE(review): text
    # (unicode) string fields are presumably handled by a separate checker
    # not visible in this chunk -- confirm against the full file.
    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
    }


# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
    }


# Maps from field types to encoder constructors.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
    }


# Maps from field types to sizer constructors.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
    }


# Maps from field type to a decoder constructor.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
    }

# Maps from field type to expected wiretype.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
        wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
|
PaddleOCR_ali1k_det_rec_300epoch_standalone/google/protobuf/internal/well_known_types.py
ADDED
|
@@ -0,0 +1,878 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Protocol Buffers - Google's data interchange format
|
| 2 |
+
# Copyright 2008 Google Inc. All rights reserved.
|
| 3 |
+
# https://developers.google.com/protocol-buffers/
|
| 4 |
+
#
|
| 5 |
+
# Redistribution and use in source and binary forms, with or without
|
| 6 |
+
# modification, are permitted provided that the following conditions are
|
| 7 |
+
# met:
|
| 8 |
+
#
|
| 9 |
+
# * Redistributions of source code must retain the above copyright
|
| 10 |
+
# notice, this list of conditions and the following disclaimer.
|
| 11 |
+
# * Redistributions in binary form must reproduce the above
|
| 12 |
+
# copyright notice, this list of conditions and the following disclaimer
|
| 13 |
+
# in the documentation and/or other materials provided with the
|
| 14 |
+
# distribution.
|
| 15 |
+
# * Neither the name of Google Inc. nor the names of its
|
| 16 |
+
# contributors may be used to endorse or promote products derived from
|
| 17 |
+
# this software without specific prior written permission.
|
| 18 |
+
#
|
| 19 |
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 20 |
+
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 21 |
+
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
| 22 |
+
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 23 |
+
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 24 |
+
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
| 25 |
+
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
| 26 |
+
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
| 27 |
+
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
| 28 |
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 29 |
+
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
| 30 |
+
|
| 31 |
+
"""Contains well known classes.
|
| 32 |
+
|
| 33 |
+
This files defines well known classes which need extra maintenance including:
|
| 34 |
+
- Any
|
| 35 |
+
- Duration
|
| 36 |
+
- FieldMask
|
| 37 |
+
- Struct
|
| 38 |
+
- Timestamp
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
__author__ = 'jieluo@google.com (Jie Luo)'
|
| 42 |
+
|
| 43 |
+
import calendar
|
| 44 |
+
import collections.abc
|
| 45 |
+
import datetime
|
| 46 |
+
|
| 47 |
+
from google.protobuf.descriptor import FieldDescriptor
|
| 48 |
+
|
| 49 |
+
# strptime format for the seconds portion of an RFC 3339 timestamp.
# (The name keeps its historical misspelling; it is referenced elsewhere.)
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
# Unit-conversion factors used by the Timestamp/Duration helpers below.
_NANOS_PER_SECOND = 1000000000
_NANOS_PER_MILLISECOND = 1000000
_NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
# Largest magnitude of Duration.seconds allowed by the proto spec
# (approximately +/- 10,000 years).
_DURATION_SECONDS_MAX = 315576000000
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class Any(object):
  """Class for Any Message type."""

  __slots__ = ()

  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
           deterministic=None):
    """Packs the specified message into current Any message."""
    full_name = msg.DESCRIPTOR.full_name
    if type_url_prefix.endswith('/'):
      self.type_url = '%s%s' % (type_url_prefix, full_name)
    else:
      # An empty prefix, or one without a trailing slash, gets a
      # separating '/' inserted before the type name.
      self.type_url = '%s/%s' % (type_url_prefix, full_name)
    self.value = msg.SerializeToString(deterministic=deterministic)

  def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    if not self.Is(msg.DESCRIPTOR):
      return False
    msg.ParseFromString(self.value)
    return True

  def TypeName(self):
    """Returns the protobuf type name of the inner message."""
    # Only last part is to be used: b/25630112
    _, _, name = self.type_url.rpartition('/')
    return name

  def Is(self, descriptor):
    """Checks if this Any represents the given protobuf type."""
    if '/' not in self.type_url:
      return False
    return self.TypeName() == descriptor.full_name
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
_EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0)
|
| 92 |
+
_EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp(
|
| 93 |
+
0, tz=datetime.timezone.utc)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class Timestamp(object):
  """Class for Timestamp message type."""

  # Attributes (seconds, nanos) come from the generated message class this
  # type is mixed into, hence the empty __slots__.
  __slots__ = ()

  def ToJsonString(self):
    """Converts Timestamp to RFC 3339 date string format.

    Returns:
      A string converted from timestamp. The string is always Z-normalized
      and uses 3, 6 or 9 fractional digits as required to represent the
      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
    """
    # Normalize so nanos is in [0, 1e9) and the carry is folded into seconds.
    nanos = self.nanos % _NANOS_PER_SECOND
    total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
    seconds = total_sec % _SECONDS_PER_DAY
    days = (total_sec - seconds) // _SECONDS_PER_DAY
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)

    result = dt.isoformat()
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 'Z'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03dZ' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06dZ' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09dZ' % nanos

  def FromJsonString(self, value):
    """Parse a RFC 3339 date string format to Timestamp.

    Args:
      value: A date string. Any fractional digits (or none) and any offset are
          accepted as long as they fit into nano-seconds precision.
          Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
    # Locate where the timezone designator begins: 'Z', '+', or the LAST '-'
    # (rfind, so the dashes inside the date part are not mistaken for it).
    timezone_offset = value.find('Z')
    if timezone_offset == -1:
      timezone_offset = value.find('+')
    if timezone_offset == -1:
      timezone_offset = value.rfind('-')
    if timezone_offset == -1:
      raise ValueError(
          'Failed to parse timestamp: missing valid timezone offset.')
    time_value = value[0:timezone_offset]
    # Parse datetime and nanos.
    point_position = time_value.find('.')
    if point_position == -1:
      second_value = time_value
      nano_value = ''
    else:
      second_value = time_value[:point_position]
      nano_value = time_value[point_position + 1:]
    if 't' in second_value:
      raise ValueError(
          'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
          'lowercase \'t\' is not accepted'.format(second_value))
    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
    td = date_object - datetime.datetime(1970, 1, 1)
    seconds = td.seconds + td.days * _SECONDS_PER_DAY
    if len(nano_value) > 9:
      raise ValueError(
          'Failed to parse Timestamp: nanos {0} more than '
          '9 fractional digits.'.format(nano_value))
    if nano_value:
      # Interpret the fractional digits as a decimal fraction of a second.
      nanos = round(float('0.' + nano_value) * 1e9)
    else:
      nanos = 0
    # Parse timezone offsets.
    if value[timezone_offset] == 'Z':
      if len(value) != timezone_offset + 1:
        raise ValueError('Failed to parse timestamp: invalid trailing'
                         ' data {0}.'.format(value))
    else:
      timezone = value[timezone_offset:]
      pos = timezone.find(':')
      if pos == -1:
        raise ValueError(
            'Invalid timezone offset value: {0}.'.format(timezone))
      # Shift into UTC: a positive offset means local time is ahead of UTC.
      if timezone[0] == '+':
        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
      else:
        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
    # Set seconds and nanos
    self.seconds = int(seconds)
    self.nanos = int(nanos)

  def GetCurrentTime(self):
    """Get the current UTC into Timestamp."""
    self.FromDatetime(datetime.datetime.utcnow())

  def ToNanoseconds(self):
    """Converts Timestamp to nanoseconds since epoch."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts Timestamp to microseconds since epoch."""
    return (self.seconds * _MICROS_PER_SECOND +
            self.nanos // _NANOS_PER_MICROSECOND)

  def ToMilliseconds(self):
    """Converts Timestamp to milliseconds since epoch."""
    return (self.seconds * _MILLIS_PER_SECOND +
            self.nanos // _NANOS_PER_MILLISECOND)

  def ToSeconds(self):
    """Converts Timestamp to seconds since epoch."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds since epoch to Timestamp."""
    self.seconds = nanos // _NANOS_PER_SECOND
    self.nanos = nanos % _NANOS_PER_SECOND

  def FromMicroseconds(self, micros):
    """Converts microseconds since epoch to Timestamp."""
    self.seconds = micros // _MICROS_PER_SECOND
    self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND

  def FromMilliseconds(self, millis):
    """Converts milliseconds since epoch to Timestamp."""
    self.seconds = millis // _MILLIS_PER_SECOND
    self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND

  def FromSeconds(self, seconds):
    """Converts seconds since epoch to Timestamp."""
    self.seconds = seconds
    self.nanos = 0

  def ToDatetime(self, tzinfo=None):
    """Converts Timestamp to a datetime.

    Args:
      tzinfo: A datetime.tzinfo subclass; defaults to None.

    Returns:
      If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone
      information, i.e. not aware that it's UTC).

      Otherwise, returns a timezone-aware datetime in the input timezone.
    """
    # Sub-microsecond precision is truncated toward zero.
    delta = datetime.timedelta(
        seconds=self.seconds,
        microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND))
    if tzinfo is None:
      return _EPOCH_DATETIME_NAIVE + delta
    else:
      return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta

  def FromDatetime(self, dt):
    """Converts datetime to Timestamp.

    Args:
      dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.
    """
    # Using this guide: http://wiki.python.org/moin/WorkingWithTime
    # And this conversion guide: http://docs.python.org/library/time.html

    # Turn the date parameter into a tuple (struct_time) that can then be
    # manipulated into a long value of seconds.  During the conversion from
    # struct_time to long, the source date in UTC, and so it follows that the
    # correct transformation is calendar.timegm()
    self.seconds = calendar.timegm(dt.utctimetuple())
    self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class Duration(object):
  """Class for Duration message type."""

  # Attributes (seconds, nanos) come from the generated message class this
  # type is mixed into, hence the empty __slots__.
  __slots__ = ()

  def ToJsonString(self):
    """Converts Duration to string format.

    Returns:
      A string converted from self. The string format will contains
      3, 6, or 9 fractional digits depending on the precision required to
      represent the exact Duration value. For example: "1s", "1.010s",
      "1.000000100s", "-3.100s"
    """
    _CheckDurationValid(self.seconds, self.nanos)
    if self.seconds < 0 or self.nanos < 0:
      # Emit a single leading '-' and work with the magnitudes.
      result = '-'
      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
      nanos = (0 - self.nanos) % 1e9
    else:
      result = ''
      seconds = self.seconds + int(self.nanos // 1e9)
      nanos = self.nanos % 1e9
    result += '%d' % seconds
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 's'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03ds' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06ds' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09ds' % nanos

  def FromJsonString(self, value):
    """Converts a string to Duration.

    Args:
      value: A string to be converted. The string must end with 's'. Any
          fractional digits (or none) are accepted as long as they fit into
          precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Duration JSON value not a string: {!r}'.format(value))
    if len(value) < 1 or value[-1] != 's':
      raise ValueError(
          'Duration must end with letter "s": {0}.'.format(value))
    try:
      pos = value.find('.')
      if pos == -1:
        seconds = int(value[:-1])
        nanos = 0
      else:
        seconds = int(value[:pos])
        # The fractional part carries the sign of the whole value so that
        # seconds and nanos never disagree in sign.
        if value[0] == '-':
          nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
        else:
          nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
      _CheckDurationValid(seconds, nanos)
      self.seconds = seconds
      self.nanos = nanos
    except ValueError as e:
      raise ValueError(
          'Couldn\'t parse duration: {0} : {1}.'.format(value, e))

  def ToNanoseconds(self):
    """Converts a Duration to nanoseconds."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts a Duration to microseconds."""
    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
    return self.seconds * _MICROS_PER_SECOND + micros

  def ToMilliseconds(self):
    """Converts a Duration to milliseconds."""
    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
    return self.seconds * _MILLIS_PER_SECOND + millis

  def ToSeconds(self):
    """Converts a Duration to seconds."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds to Duration."""
    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
                            nanos % _NANOS_PER_SECOND)

  def FromMicroseconds(self, micros):
    """Converts microseconds to Duration."""
    self._NormalizeDuration(
        micros // _MICROS_PER_SECOND,
        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)

  def FromMilliseconds(self, millis):
    """Converts milliseconds to Duration."""
    self._NormalizeDuration(
        millis // _MILLIS_PER_SECOND,
        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)

  def FromSeconds(self, seconds):
    """Converts seconds to Duration."""
    self.seconds = seconds
    self.nanos = 0

  def ToTimedelta(self):
    """Converts Duration to timedelta."""
    # Sub-microsecond precision is truncated toward zero.
    return datetime.timedelta(
        seconds=self.seconds, microseconds=_RoundTowardZero(
            self.nanos, _NANOS_PER_MICROSECOND))

  def FromTimedelta(self, td):
    """Converts timedelta to Duration."""
    self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
                            td.microseconds * _NANOS_PER_MICROSECOND)

  def _NormalizeDuration(self, seconds, nanos):
    """Set Duration by seconds and nanos."""
    # Force nanos to be negative if the duration is negative.
    if seconds < 0 and nanos > 0:
      seconds += 1
      nanos -= _NANOS_PER_SECOND
    self.seconds = seconds
    self.nanos = nanos
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
def _CheckDurationValid(seconds, nanos):
  """Raises ValueError when (seconds, nanos) is not a valid Duration.

  Validity requires seconds within the proto-defined range, nanos strictly
  within one second, and matching signs on the two components.
  """
  if not -_DURATION_SECONDS_MAX <= seconds <= _DURATION_SECONDS_MAX:
    raise ValueError(
        'Duration is not valid: Seconds {0} must be in range '
        '[-315576000000, 315576000000].'.format(seconds))
  if not -_NANOS_PER_SECOND < nanos < _NANOS_PER_SECOND:
    raise ValueError(
        'Duration is not valid: Nanos {0} must be in range '
        '[-999999999, 999999999].'.format(nanos))
  if (seconds > 0 and nanos < 0) or (seconds < 0 and nanos > 0):
    raise ValueError(
        'Duration is not valid: Sign mismatch.')
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
def _RoundTowardZero(value, divider):
|
| 419 |
+
"""Truncates the remainder part after division."""
|
| 420 |
+
# For some languages, the sign of the remainder is implementation
|
| 421 |
+
# dependent if any of the operands is negative. Here we enforce
|
| 422 |
+
# "rounded toward zero" semantics. For example, for (-5) / 2 an
|
| 423 |
+
# implementation may give -3 as the result with the remainder being
|
| 424 |
+
# 1. This function ensures we always return -2 (closer to zero).
|
| 425 |
+
result = value // divider
|
| 426 |
+
remainder = value % divider
|
| 427 |
+
if result < 0 and remainder > 0:
|
| 428 |
+
return result + 1
|
| 429 |
+
else:
|
| 430 |
+
return result
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
class FieldMask(object):
  """Class for FieldMask message type.

  Mixed into generated FieldMask messages to provide proto3 JSON
  conversion, validation, and set-algebra helpers over ``paths``.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    # An empty string means an empty mask, not one empty path.
    if value:
      for camelcase_path in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(camelcase_path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(
        _IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    union_tree = _FieldMaskTree(mask1)
    union_tree.MergeFromFieldMask(mask2)
    union_tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
          field if False.
      replace_repeated_field: Replace repeated field if True. Append
          elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
def _IsValidPath(message_descriptor, path):
  """Checks whether a dotted field path is valid for a Message Descriptor.

  Every intermediate segment must resolve to a singular message field; the
  final segment only needs to exist on the descriptor reached.
  """
  *intermediate, last = path.split('.')
  for segment in intermediate:
    field = message_descriptor.fields_by_name.get(segment)
    if (field is None or
        field.label == FieldDescriptor.LABEL_REPEATED or
        field.type != FieldDescriptor.TYPE_MESSAGE):
      return False
    message_descriptor = field.message_type
  return last in message_descriptor.fields_by_name
|
| 528 |
+
|
| 529 |
+
|
| 530 |
+
def _CheckFieldMaskMessage(message):
|
| 531 |
+
"""Raises ValueError if message is not a FieldMask."""
|
| 532 |
+
message_descriptor = message.DESCRIPTOR
|
| 533 |
+
if (message_descriptor.name != 'FieldMask' or
|
| 534 |
+
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
|
| 535 |
+
raise ValueError('Message {0} is not a FieldMask.'.format(
|
| 536 |
+
message_descriptor.full_name))
|
| 537 |
+
|
| 538 |
+
|
| 539 |
+
def _SnakeCaseToCamelCase(path_name):
|
| 540 |
+
"""Converts a path name from snake_case to camelCase."""
|
| 541 |
+
result = []
|
| 542 |
+
after_underscore = False
|
| 543 |
+
for c in path_name:
|
| 544 |
+
if c.isupper():
|
| 545 |
+
raise ValueError(
|
| 546 |
+
'Fail to print FieldMask to Json string: Path name '
|
| 547 |
+
'{0} must not contain uppercase letters.'.format(path_name))
|
| 548 |
+
if after_underscore:
|
| 549 |
+
if c.islower():
|
| 550 |
+
result.append(c.upper())
|
| 551 |
+
after_underscore = False
|
| 552 |
+
else:
|
| 553 |
+
raise ValueError(
|
| 554 |
+
'Fail to print FieldMask to Json string: The '
|
| 555 |
+
'character after a "_" must be a lowercase letter '
|
| 556 |
+
'in path name {0}.'.format(path_name))
|
| 557 |
+
elif c == '_':
|
| 558 |
+
after_underscore = True
|
| 559 |
+
else:
|
| 560 |
+
result += c
|
| 561 |
+
|
| 562 |
+
if after_underscore:
|
| 563 |
+
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
|
| 564 |
+
'in path name {0}.'.format(path_name))
|
| 565 |
+
return ''.join(result)
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
def _CamelCaseToSnakeCase(path_name):
|
| 569 |
+
"""Converts a field name from camelCase to snake_case."""
|
| 570 |
+
result = []
|
| 571 |
+
for c in path_name:
|
| 572 |
+
if c == '_':
|
| 573 |
+
raise ValueError('Fail to parse FieldMask: Path name '
|
| 574 |
+
'{0} must not contain "_"s.'.format(path_name))
|
| 575 |
+
if c.isupper():
|
| 576 |
+
result += '_'
|
| 577 |
+
result += c.lower()
|
| 578 |
+
else:
|
| 579 |
+
result += c
|
| 580 |
+
return ''.join(result)
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
class _FieldMaskTree(object):
|
| 584 |
+
"""Represents a FieldMask in a tree structure.
|
| 585 |
+
|
| 586 |
+
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
|
| 587 |
+
the FieldMaskTree will be:
|
| 588 |
+
[_root] -+- foo -+- bar
|
| 589 |
+
| |
|
| 590 |
+
| +- baz
|
| 591 |
+
|
|
| 592 |
+
+- bar --- baz
|
| 593 |
+
In the tree, each leaf node represents a field path.
|
| 594 |
+
"""
|
| 595 |
+
|
| 596 |
+
__slots__ = ('_root',)
|
| 597 |
+
|
| 598 |
+
def __init__(self, field_mask=None):
|
| 599 |
+
"""Initializes the tree by FieldMask."""
|
| 600 |
+
self._root = {}
|
| 601 |
+
if field_mask:
|
| 602 |
+
self.MergeFromFieldMask(field_mask)
|
| 603 |
+
|
| 604 |
+
def MergeFromFieldMask(self, field_mask):
|
| 605 |
+
"""Merges a FieldMask to the tree."""
|
| 606 |
+
for path in field_mask.paths:
|
| 607 |
+
self.AddPath(path)
|
| 608 |
+
|
| 609 |
+
def AddPath(self, path):
|
| 610 |
+
"""Adds a field path into the tree.
|
| 611 |
+
|
| 612 |
+
If the field path to add is a sub-path of an existing field path
|
| 613 |
+
in the tree (i.e., a leaf node), it means the tree already matches
|
| 614 |
+
the given path so nothing will be added to the tree. If the path
|
| 615 |
+
matches an existing non-leaf node in the tree, that non-leaf node
|
| 616 |
+
will be turned into a leaf node with all its children removed because
|
| 617 |
+
the path matches all the node's children. Otherwise, a new path will
|
| 618 |
+
be added.
|
| 619 |
+
|
| 620 |
+
Args:
|
| 621 |
+
path: The field path to add.
|
| 622 |
+
"""
|
| 623 |
+
node = self._root
|
| 624 |
+
for name in path.split('.'):
|
| 625 |
+
if name not in node:
|
| 626 |
+
node[name] = {}
|
| 627 |
+
elif not node[name]:
|
| 628 |
+
# Pre-existing empty node implies we already have this entire tree.
|
| 629 |
+
return
|
| 630 |
+
node = node[name]
|
| 631 |
+
# Remove any sub-trees we might have had.
|
| 632 |
+
node.clear()
|
| 633 |
+
|
| 634 |
+
def ToFieldMask(self, field_mask):
|
| 635 |
+
"""Converts the tree to a FieldMask."""
|
| 636 |
+
field_mask.Clear()
|
| 637 |
+
_AddFieldPaths(self._root, '', field_mask)
|
| 638 |
+
|
| 639 |
+
def IntersectPath(self, path, intersection):
|
| 640 |
+
"""Calculates the intersection part of a field path with this tree.
|
| 641 |
+
|
| 642 |
+
Args:
|
| 643 |
+
path: The field path to calculates.
|
| 644 |
+
intersection: The out tree to record the intersection part.
|
| 645 |
+
"""
|
| 646 |
+
node = self._root
|
| 647 |
+
for name in path.split('.'):
|
| 648 |
+
if name not in node:
|
| 649 |
+
return
|
| 650 |
+
elif not node[name]:
|
| 651 |
+
intersection.AddPath(path)
|
| 652 |
+
return
|
| 653 |
+
node = node[name]
|
| 654 |
+
intersection.AddLeafNodes(path, node)
|
| 655 |
+
|
| 656 |
+
def AddLeafNodes(self, prefix, node):
|
| 657 |
+
"""Adds leaf nodes begin with prefix to this tree."""
|
| 658 |
+
if not node:
|
| 659 |
+
self.AddPath(prefix)
|
| 660 |
+
for name in node:
|
| 661 |
+
child_path = prefix + '.' + name
|
| 662 |
+
self.AddLeafNodes(child_path, node[name])
|
| 663 |
+
|
| 664 |
+
def MergeMessage(
|
| 665 |
+
self, source, destination,
|
| 666 |
+
replace_message, replace_repeated):
|
| 667 |
+
"""Merge all fields specified by this tree from source to destination."""
|
| 668 |
+
_MergeMessage(
|
| 669 |
+
self._root, source, destination, replace_message, replace_repeated)
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
def _StrConvert(value):
|
| 673 |
+
"""Converts value to str if it is not."""
|
| 674 |
+
# This file is imported by c extension and some methods like ClearField
|
| 675 |
+
# requires string for the field name. py2/py3 has different text
|
| 676 |
+
# type and may use unicode.
|
| 677 |
+
if not isinstance(value, str):
|
| 678 |
+
return value.encode('utf-8')
|
| 679 |
+
return value
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
def _MergeMessage(
    node, source, destination, replace_message, replace_repeated):
  """Merges all fields specified by a sub-tree from source to destination.

  Args:
    node: A _FieldMaskTree sub-tree (dict mapping field name -> child dict).
    source: Source message.
    destination: Destination message of the same type as source.
    replace_message: If True, replace singular message fields instead of
        merging them.
    replace_repeated: If True, replace repeated fields instead of
        appending to them.

  Raises:
    ValueError: If a name in the tree is not a field of the message, or a
        sub-tree is given for a field that is not a singular message field.
  """
  source_descriptor = source.DESCRIPTOR
  for name in node:
    child = node[name]
    # Bug fix: use .get() so an unknown field name reaches the intended
    # ValueError below; the original fields_by_name[name] raised KeyError
    # first, making the None check unreachable.
    field = source_descriptor.fields_by_name.get(name)
    if field is None:
      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
          name, source_descriptor.full_name))
    if child:
      # Sub-paths are only allowed for singular message fields.
      if (field.label == FieldDescriptor.LABEL_REPEATED or
          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
        raise ValueError('Error: Field {0} in message {1} is not a singular '
                         'message field and cannot have sub-fields.'.format(
                             name, source_descriptor.full_name))
      if source.HasField(name):
        _MergeMessage(
            child, getattr(source, name), getattr(destination, name),
            replace_message, replace_repeated)
      continue
    if field.label == FieldDescriptor.LABEL_REPEATED:
      if replace_repeated:
        destination.ClearField(_StrConvert(name))
      repeated_source = getattr(source, name)
      repeated_destination = getattr(destination, name)
      repeated_destination.MergeFrom(repeated_source)
    else:
      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
        if replace_message:
          destination.ClearField(_StrConvert(name))
        if source.HasField(name):
          getattr(destination, name).MergeFrom(getattr(source, name))
      else:
        setattr(destination, name, getattr(source, name))
|
| 718 |
+
|
| 719 |
+
|
| 720 |
+
def _AddFieldPaths(node, prefix, field_mask):
|
| 721 |
+
"""Adds the field paths descended from node to field_mask."""
|
| 722 |
+
if not node and prefix:
|
| 723 |
+
field_mask.paths.append(prefix)
|
| 724 |
+
return
|
| 725 |
+
for name in sorted(node):
|
| 726 |
+
if prefix:
|
| 727 |
+
child_path = prefix + '.' + name
|
| 728 |
+
else:
|
| 729 |
+
child_path = name
|
| 730 |
+
_AddFieldPaths(node[name], child_path, field_mask)
|
| 731 |
+
|
| 732 |
+
|
| 733 |
+
def _SetStructValue(struct_value, value):
|
| 734 |
+
if value is None:
|
| 735 |
+
struct_value.null_value = 0
|
| 736 |
+
elif isinstance(value, bool):
|
| 737 |
+
# Note: this check must come before the number check because in Python
|
| 738 |
+
# True and False are also considered numbers.
|
| 739 |
+
struct_value.bool_value = value
|
| 740 |
+
elif isinstance(value, str):
|
| 741 |
+
struct_value.string_value = value
|
| 742 |
+
elif isinstance(value, (int, float)):
|
| 743 |
+
struct_value.number_value = value
|
| 744 |
+
elif isinstance(value, (dict, Struct)):
|
| 745 |
+
struct_value.struct_value.Clear()
|
| 746 |
+
struct_value.struct_value.update(value)
|
| 747 |
+
elif isinstance(value, (list, ListValue)):
|
| 748 |
+
struct_value.list_value.Clear()
|
| 749 |
+
struct_value.list_value.extend(value)
|
| 750 |
+
else:
|
| 751 |
+
raise ValueError('Unexpected type')
|
| 752 |
+
|
| 753 |
+
|
| 754 |
+
def _GetStructValue(struct_value):
|
| 755 |
+
which = struct_value.WhichOneof('kind')
|
| 756 |
+
if which == 'struct_value':
|
| 757 |
+
return struct_value.struct_value
|
| 758 |
+
elif which == 'null_value':
|
| 759 |
+
return None
|
| 760 |
+
elif which == 'number_value':
|
| 761 |
+
return struct_value.number_value
|
| 762 |
+
elif which == 'string_value':
|
| 763 |
+
return struct_value.string_value
|
| 764 |
+
elif which == 'bool_value':
|
| 765 |
+
return struct_value.bool_value
|
| 766 |
+
elif which == 'list_value':
|
| 767 |
+
return struct_value.list_value
|
| 768 |
+
elif which is None:
|
| 769 |
+
raise ValueError('Value not set')
|
| 770 |
+
|
| 771 |
+
|
| 772 |
+
class Struct(object):
  """Class for Struct message type.

  Mixed into generated Struct messages to provide a dict-like interface
  over the underlying ``fields`` map of Value messages.
  """

  __slots__ = ()

  def __getitem__(self, key):
    return _GetStructValue(self.fields[key])

  def __contains__(self, item):
    return item in self.fields

  def __setitem__(self, key, value):
    _SetStructValue(self.fields[key], value)

  def __delitem__(self, key):
    del self.fields[key]

  def __len__(self):
    return len(self.fields)

  def __iter__(self):
    return iter(self.fields)

  def keys(self):  # pylint: disable=invalid-name
    return self.fields.keys()

  def values(self):  # pylint: disable=invalid-name
    return [self[name] for name in self.fields]

  def items(self):  # pylint: disable=invalid-name
    return [(name, self[name]) for name in self.fields]

  def get_or_create_list(self, key):
    """Returns a list for this key, creating it if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('list_value'):
      # Clear marks list_value modified, which materializes the list.
      entry.list_value.Clear()
    return entry.list_value

  def get_or_create_struct(self, key):
    """Returns a struct for this key, creating it if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('struct_value'):
      # Clear marks struct_value modified, which materializes the struct.
      entry.struct_value.Clear()
    return entry.struct_value

  def update(self, dictionary):  # pylint: disable=invalid-name
    for key, value in dictionary.items():
      self[key] = value

collections.abc.MutableMapping.register(Struct)
|
| 823 |
+
|
| 824 |
+
|
| 825 |
+
class ListValue(object):
  """Class for ListValue message type.

  Mixed into generated ListValue messages to provide a list-like
  interface over the underlying repeated ``values`` field.
  """

  __slots__ = ()

  def __len__(self):
    return len(self.values)

  def append(self, value):
    _SetStructValue(self.values.add(), value)

  def extend(self, elem_seq):
    for elem in elem_seq:
      self.append(elem)

  def __getitem__(self, index):
    """Retrieves the item at the specified index."""
    return _GetStructValue(self.values[index])

  def __setitem__(self, index, value):
    _SetStructValue(self.values[index], value)

  def __delitem__(self, key):
    del self.values[key]

  def items(self):
    # NOTE: unlike dict.items, this yields bare values, not pairs.
    for idx in range(len(self)):
      yield self[idx]

  def add_struct(self):
    """Appends and returns a struct value as the next value in the list."""
    new_struct = self.values.add().struct_value
    # Clear marks struct_value modified, which materializes the struct.
    new_struct.Clear()
    return new_struct

  def add_list(self):
    """Appends and returns a list value as the next value in the list."""
    new_list = self.values.add().list_value
    # Clear marks list_value modified, which materializes the list.
    new_list.Clear()
    return new_list

collections.abc.MutableSequence.register(ListValue)
|
| 869 |
+
|
| 870 |
+
|
| 871 |
+
# Maps the full proto name of each well-known type to the Python mixin
# class (defined in this module) that supplies its helper API.
WKTBASES = {
    'google.protobuf.Any': Any,
    'google.protobuf.Duration': Duration,
    'google.protobuf.FieldMask': FieldMask,
    'google.protobuf.ListValue': ListValue,
    'google.protobuf.Struct': Struct,
    'google.protobuf.Timestamp': Timestamp,
}
|