ADAPT-Chase commited on
Commit
54de01d
·
verified ·
1 Parent(s): c27a279

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +11 -0
  2. tool_server/.venv/lib/python3.12/site-packages/google/_upb/_message.abi3.so +3 -0
  3. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/__init__.py +10 -0
  4. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/any.py +53 -0
  5. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/any_pb2.py +37 -0
  6. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/api_pb2.py +47 -0
  7. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/compiler/__init__.py +0 -0
  8. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/compiler/plugin_pb2.py +46 -0
  9. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor.py +1676 -0
  10. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_database.py +172 -0
  11. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pb2.py +0 -0
  12. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pool.py +1370 -0
  13. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/duration.py +100 -0
  14. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/duration_pb2.py +37 -0
  15. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/empty_pb2.py +37 -0
  16. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/field_mask_pb2.py +37 -0
  17. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/__init__.py +7 -0
  18. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/api_implementation.py +136 -0
  19. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/builder.py +118 -0
  20. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/containers.py +690 -0
  21. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/decoder.py +1066 -0
  22. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/encoder.py +806 -0
  23. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/enum_type_wrapper.py +112 -0
  24. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/extension_dict.py +194 -0
  25. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/field_mask.py +312 -0
  26. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/message_listener.py +55 -0
  27. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/python_edition_defaults.py +5 -0
  28. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/python_message.py +1591 -0
  29. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/testing_refleaks.py +128 -0
  30. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/type_checkers.py +455 -0
  31. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/well_known_types.py +695 -0
  32. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/wire_format.py +245 -0
  33. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/json_format.py +1107 -0
  34. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/message.py +448 -0
  35. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/message_factory.py +190 -0
  36. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto.py +153 -0
  37. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_builder.py +111 -0
  38. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_json.py +83 -0
  39. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_text.py +129 -0
  40. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/pyext/__init__.py +0 -0
  41. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/pyext/cpp_message.py +49 -0
  42. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/reflection.py +36 -0
  43. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/runtime_version.py +104 -0
  44. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/service_reflection.py +272 -0
  45. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/source_context_pb2.py +37 -0
  46. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/struct_pb2.py +47 -0
  47. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/symbol_database.py +179 -0
  48. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/testdata/__init__.py +0 -0
  49. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/text_encoding.py +106 -0
  50. tool_server/.venv/lib/python3.12/site-packages/google/protobuf/text_format.py +1884 -0
.gitattributes CHANGED
@@ -4041,3 +4041,14 @@ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/pl/LC_MESSAGES/
4041
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/pl/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4042
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/ro/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4043
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/ru/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
4041
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/pl/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4042
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/ro/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4043
  tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/ru/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4044
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/sr/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4045
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/sr@latin/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4046
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/sv/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4047
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/sv/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4048
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/ta/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4049
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/tr/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4050
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/uk/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4051
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/uk/LC_MESSAGES/iso639-3.mo filter=lfs diff=lfs merge=lfs -text
4052
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/vi/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4053
+ tool_server/.venv/lib/python3.12/site-packages/pycountry/locales/zh_CN/LC_MESSAGES/iso3166-2.mo filter=lfs diff=lfs merge=lfs -text
4054
+ tool_server/.venv/lib/python3.12/site-packages/google/_upb/_message.abi3.so filter=lfs diff=lfs merge=lfs -text
tool_server/.venv/lib/python3.12/site-packages/google/_upb/_message.abi3.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1f35655ccf4187793dd703dec98446c77f5c6a3158b56346e582b5258648295e
3
+ size 401336
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/__init__.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # Copyright 2007 Google Inc. All Rights Reserved.
9
+
10
+ __version__ = '6.32.0'
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/any.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Any helper APIs."""
9
+
10
+ from typing import Optional, TypeVar
11
+
12
+ from google.protobuf import descriptor
13
+ from google.protobuf.message import Message
14
+
15
+ from google.protobuf.any_pb2 import Any
16
+
17
+
18
+ _MessageT = TypeVar('_MessageT', bound=Message)
19
+
20
+
21
+ def pack(
22
+ msg: Message,
23
+ type_url_prefix: Optional[str] = 'type.googleapis.com/',
24
+ deterministic: Optional[bool] = None,
25
+ ) -> Any:
26
+ any_msg = Any()
27
+ any_msg.Pack(
28
+ msg=msg, type_url_prefix=type_url_prefix, deterministic=deterministic
29
+ )
30
+ return any_msg
31
+
32
+
33
+ def unpack(any_msg: Any, msg: Message) -> bool:
34
+ return any_msg.Unpack(msg=msg)
35
+
36
+
37
+ def unpack_as(any_msg: Any, message_type: type[_MessageT]) -> _MessageT:
38
+ unpacked = message_type()
39
+ if unpack(any_msg, unpacked):
40
+ return unpacked
41
+ else:
42
+ raise TypeError(
43
+ f'Attempted to unpack {type_name(any_msg)} to'
44
+ f' {message_type.__qualname__}'
45
+ )
46
+
47
+
48
+ def type_name(any_msg: Any) -> str:
49
+ return any_msg.TypeName()
50
+
51
+
52
+ def is_type(any_msg: Any, des: descriptor.Descriptor) -> bool:
53
+ return any_msg.Is(des)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/any_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/any.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/any.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+
26
+
27
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"6\n\x03\x41ny\x12\x19\n\x08type_url\x18\x01 \x01(\tR\x07typeUrl\x12\x14\n\x05value\x18\x02 \x01(\x0cR\x05valueBv\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
28
+
29
+ _globals = globals()
30
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', _globals)
32
+ if not _descriptor._USE_C_DESCRIPTORS:
33
+ _globals['DESCRIPTOR']._loaded_options = None
34
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
35
+ _globals['_ANY']._serialized_start=46
36
+ _globals['_ANY']._serialized_end=100
37
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/api_pb2.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/api.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/api.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+ from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
26
+ from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
27
+
28
+
29
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\xdb\x02\n\x03\x41pi\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x31\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.MethodR\x07methods\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x45\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12.\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.MixinR\x06mixins\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\x12\x18\n\x07\x65\x64ition\x18\x08 \x01(\tR\x07\x65\x64ition\"\xd4\x02\n\x06Method\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12(\n\x10request_type_url\x18\x02 \x01(\tR\x0erequestTypeUrl\x12+\n\x11request_streaming\x18\x03 \x01(\x08R\x10requestStreaming\x12*\n\x11response_type_url\x18\x04 \x01(\tR\x0fresponseTypeUrl\x12-\n\x12response_streaming\x18\x05 \x01(\x08R\x11responseStreaming\x12\x31\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x33\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxB\x02\x18\x01R\x06syntax\x12\x1c\n\x07\x65\x64ition\x18\x08 \x01(\tB\x02\x18\x01R\x07\x65\x64ition\"/\n\x05Mixin\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x12\n\x04root\x18\x02 \x01(\tR\x04rootBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
30
+
31
+ _globals = globals()
32
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
33
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', _globals)
34
+ if not _descriptor._USE_C_DESCRIPTORS:
35
+ _globals['DESCRIPTOR']._loaded_options = None
36
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
37
+ _globals['_METHOD'].fields_by_name['syntax']._loaded_options = None
38
+ _globals['_METHOD'].fields_by_name['syntax']._serialized_options = b'\030\001'
39
+ _globals['_METHOD'].fields_by_name['edition']._loaded_options = None
40
+ _globals['_METHOD'].fields_by_name['edition']._serialized_options = b'\030\001'
41
+ _globals['_API']._serialized_start=113
42
+ _globals['_API']._serialized_end=460
43
+ _globals['_METHOD']._serialized_start=463
44
+ _globals['_METHOD']._serialized_end=803
45
+ _globals['_MIXIN']._serialized_start=805
46
+ _globals['_MIXIN']._serialized_end=852
47
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/compiler/__init__.py ADDED
File without changes
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/compiler/plugin_pb2.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/compiler/plugin.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/compiler/plugin.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+ from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
26
+
27
+
28
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"c\n\x07Version\x12\x14\n\x05major\x18\x01 \x01(\x05R\x05major\x12\x14\n\x05minor\x18\x02 \x01(\x05R\x05minor\x12\x14\n\x05patch\x18\x03 \x01(\x05R\x05patch\x12\x16\n\x06suffix\x18\x04 \x01(\tR\x06suffix\"\xcf\x02\n\x14\x43odeGeneratorRequest\x12(\n\x10\x66ile_to_generate\x18\x01 \x03(\tR\x0e\x66ileToGenerate\x12\x1c\n\tparameter\x18\x02 \x01(\tR\tparameter\x12\x43\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\tprotoFile\x12\\\n\x17source_file_descriptors\x18\x11 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x15sourceFileDescriptors\x12L\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.VersionR\x0f\x63ompilerVersion\"\x85\x04\n\x15\x43odeGeneratorResponse\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12-\n\x12supported_features\x18\x02 \x01(\x04R\x11supportedFeatures\x12\'\n\x0fminimum_edition\x18\x03 \x01(\x05R\x0eminimumEdition\x12\'\n\x0fmaximum_edition\x18\x04 \x01(\x05R\x0emaximumEdition\x12H\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.FileR\x04\x66ile\x1a\xb1\x01\n\x04\x46ile\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\'\n\x0finsertion_point\x18\x02 \x01(\tR\x0einsertionPoint\x12\x18\n\x07\x63ontent\x18\x0f \x01(\tR\x07\x63ontent\x12R\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfoR\x11generatedCodeInfo\"W\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x12\x1d\n\x19\x46\x45\x41TURE_SUPPORTS_EDITIONS\x10\x02\x42r\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb\xaa\x02\x18Google.Protobuf.Compiler')
29
+
30
+ _globals = globals()
31
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
32
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', _globals)
33
+ if not _descriptor._USE_C_DESCRIPTORS:
34
+ _globals['DESCRIPTOR']._loaded_options = None
35
+ _globals['DESCRIPTOR']._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb\252\002\030Google.Protobuf.Compiler'
36
+ _globals['_VERSION']._serialized_start=101
37
+ _globals['_VERSION']._serialized_end=200
38
+ _globals['_CODEGENERATORREQUEST']._serialized_start=203
39
+ _globals['_CODEGENERATORREQUEST']._serialized_end=538
40
+ _globals['_CODEGENERATORRESPONSE']._serialized_start=541
41
+ _globals['_CODEGENERATORRESPONSE']._serialized_end=1058
42
+ _globals['_CODEGENERATORRESPONSE_FILE']._serialized_start=792
43
+ _globals['_CODEGENERATORRESPONSE_FILE']._serialized_end=969
44
+ _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_start=971
45
+ _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_end=1058
46
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor.py ADDED
@@ -0,0 +1,1676 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Descriptors essentially contain exactly the information found in a .proto
9
+
10
+ file, in types that make this information accessible in Python.
11
+ """
12
+
13
+ __author__ = 'robinson@google.com (Will Robinson)'
14
+
15
+ import abc
16
+ import binascii
17
+ import os
18
+ import threading
19
+ import warnings
20
+
21
+ from google.protobuf.internal import api_implementation
22
+
23
+ _USE_C_DESCRIPTORS = False
24
+ if api_implementation.Type() != 'python':
25
+ # pylint: disable=protected-access
26
+ _message = api_implementation._c_module
27
+ # TODO: Remove this import after fix api_implementation
28
+ if _message is None:
29
+ from google.protobuf.pyext import _message
30
+ _USE_C_DESCRIPTORS = True
31
+
32
+
33
+ class Error(Exception):
34
+ """Base error for this module."""
35
+
36
+
37
+ class TypeTransformationError(Error):
38
+ """Error transforming between python proto type and corresponding C++ type."""
39
+
40
+
41
+ if _USE_C_DESCRIPTORS:
42
+ # This metaclass allows to override the behavior of code like
43
+ # isinstance(my_descriptor, FieldDescriptor)
44
+ # and make it return True when the descriptor is an instance of the extension
45
+ # type written in C++.
46
+ class DescriptorMetaclass(type):
47
+
48
+ def __instancecheck__(cls, obj):
49
+ if super(DescriptorMetaclass, cls).__instancecheck__(obj):
50
+ return True
51
+ if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
52
+ return True
53
+ return False
54
+
55
+ else:
56
+ # The standard metaclass; nothing changes.
57
+ DescriptorMetaclass = abc.ABCMeta
58
+
59
+
60
+ class _Lock(object):
61
+ """Wrapper class of threading.Lock(), which is allowed by 'with'."""
62
+
63
+ def __new__(cls):
64
+ self = object.__new__(cls)
65
+ self._lock = threading.Lock() # pylint: disable=protected-access
66
+ return self
67
+
68
+ def __enter__(self):
69
+ self._lock.acquire()
70
+
71
+ def __exit__(self, exc_type, exc_value, exc_tb):
72
+ self._lock.release()
73
+
74
+
75
+ _lock = threading.Lock()
76
+
77
+
78
+ def _Deprecated(
79
+ name,
80
+ alternative='get/find descriptors from generated code or query the descriptor_pool',
81
+ ):
82
+ if _Deprecated.count > 0:
83
+ _Deprecated.count -= 1
84
+ warnings.warn(
85
+ 'Call to deprecated %s, use %s instead.' % (name, alternative),
86
+ category=DeprecationWarning,
87
+ stacklevel=3,
88
+ )
89
+
90
+
91
+ # These must match the values in descriptor.proto, but we can't use them
92
+ # directly because we sometimes need to reference them in feature helpers
93
+ # below *during* the build of descriptor.proto.
94
+ _FEATURESET_MESSAGE_ENCODING_DELIMITED = 2
95
+ _FEATURESET_FIELD_PRESENCE_IMPLICIT = 2
96
+ _FEATURESET_FIELD_PRESENCE_LEGACY_REQUIRED = 3
97
+ _FEATURESET_REPEATED_FIELD_ENCODING_PACKED = 1
98
+ _FEATURESET_ENUM_TYPE_CLOSED = 2
99
+
100
+ # Deprecated warnings will print 100 times at most which should be enough for
101
+ # users to notice and do not cause timeout.
102
+ _Deprecated.count = 100
103
+
104
+
105
+ _internal_create_key = object()
106
+
107
+
108
+ class DescriptorBase(metaclass=DescriptorMetaclass):
109
+ """Descriptors base class.
110
+
111
+ This class is the base of all descriptor classes. It provides common options
112
+ related functionality.
113
+
114
+ Attributes:
115
+ has_options: True if the descriptor has non-default options. Usually it is
116
+ not necessary to read this -- just call GetOptions() which will happily
117
+ return the default instance. However, it's sometimes useful for
118
+ efficiency, and also useful inside the protobuf implementation to avoid
119
+ some bootstrapping issues.
120
+ file (FileDescriptor): Reference to file info.
121
+ """
122
+
123
+ if _USE_C_DESCRIPTORS:
124
+ # The class, or tuple of classes, that are considered as "virtual
125
+ # subclasses" of this descriptor class.
126
+ _C_DESCRIPTOR_CLASS = ()
127
+
128
+ def __init__(self, file, options, serialized_options, options_class_name):
129
+ """Initialize the descriptor given its options message and the name of the
130
+
131
+ class of the options message. The name of the class is required in case
132
+ the options message is None and has to be created.
133
+ """
134
+ self._features = None
135
+ self.file = file
136
+ self._original_options = options
137
+ # These two fields are duplicated as a compatibility shim for old gencode
138
+ # that resets them. In 26.x (cl/580304039) we renamed _options to,
139
+ # _loaded_options breaking backwards compatibility.
140
+ self._options = self._loaded_options = None
141
+ self._options_class_name = options_class_name
142
+ self._serialized_options = serialized_options
143
+
144
+ # Does this descriptor have non-default options?
145
+ self.has_options = (self._original_options is not None) or (
146
+ self._serialized_options is not None
147
+ )
148
+
149
+ @property
150
+ @abc.abstractmethod
151
+ def _parent(self):
152
+ pass
153
+
154
+ def _InferLegacyFeatures(self, edition, options, features):
155
+ """Infers features from proto2/proto3 syntax so that editions logic can be used everywhere.
156
+
157
+ Args:
158
+ edition: The edition to infer features for.
159
+ options: The options for this descriptor that are being processed.
160
+ features: The feature set object to modify with inferred features.
161
+ """
162
+ pass
163
+
164
+ def _GetFeatures(self):
165
+ if not self._features:
166
+ self._LazyLoadOptions()
167
+ return self._features
168
+
169
+ def _ResolveFeatures(self, edition, raw_options):
170
+ """Resolves features from the raw options of this descriptor.
171
+
172
+ Args:
173
+ edition: The edition to use for feature defaults.
174
+ raw_options: The options for this descriptor that are being processed.
175
+
176
+ Returns:
177
+ A fully resolved feature set for making runtime decisions.
178
+ """
179
+ # pylint: disable=g-import-not-at-top
180
+ from google.protobuf import descriptor_pb2
181
+
182
+ if self._parent:
183
+ features = descriptor_pb2.FeatureSet()
184
+ features.CopyFrom(self._parent._GetFeatures())
185
+ else:
186
+ features = self.file.pool._CreateDefaultFeatures(edition)
187
+ unresolved = descriptor_pb2.FeatureSet()
188
+ unresolved.CopyFrom(raw_options.features)
189
+ self._InferLegacyFeatures(edition, raw_options, unresolved)
190
+ features.MergeFrom(unresolved)
191
+
192
+ # Use the feature cache to reduce memory bloat.
193
+ return self.file.pool._InternFeatures(features)
194
+
195
+ def _LazyLoadOptions(self):
196
+ """Lazily initializes descriptor options towards the end of the build."""
197
+ if self._options and self._loaded_options == self._options:
198
+ # If neither has been reset by gencode, use the cache.
199
+ return
200
+
201
+ # pylint: disable=g-import-not-at-top
202
+ from google.protobuf import descriptor_pb2
203
+
204
+ if not hasattr(descriptor_pb2, self._options_class_name):
205
+ raise RuntimeError(
206
+ 'Unknown options class name %s!' % self._options_class_name
207
+ )
208
+ options_class = getattr(descriptor_pb2, self._options_class_name)
209
+ features = None
210
+ edition = self.file._edition
211
+
212
+ if not self.has_options:
213
+ if not self._features:
214
+ features = self._ResolveFeatures(
215
+ descriptor_pb2.Edition.Value(edition), options_class()
216
+ )
217
+ with _lock:
218
+ self._options = self._loaded_options = options_class()
219
+ if not self._features:
220
+ self._features = features
221
+ else:
222
+ if not self._serialized_options:
223
+ options = self._original_options
224
+ else:
225
+ options = _ParseOptions(options_class(), self._serialized_options)
226
+
227
+ if not self._features:
228
+ features = self._ResolveFeatures(
229
+ descriptor_pb2.Edition.Value(edition), options
230
+ )
231
+ with _lock:
232
+ self._options = self._loaded_options = options
233
+ if not self._features:
234
+ self._features = features
235
+ if options.HasField('features'):
236
+ options.ClearField('features')
237
+ if not options.SerializeToString():
238
+ self._options = self._loaded_options = options_class()
239
+ self.has_options = False
240
+
241
  def GetOptions(self):
    """Retrieves descriptor options.

    Returns:
      The options set on this descriptor.
    """
    # If either has been reset by gencode, reload options.
    if not self._options or not self._loaded_options:
      self._LazyLoadOptions()
    return self._options
251
+
252
+
253
class _NestedDescriptorBase(DescriptorBase):
  """Common class for descriptors that can be nested."""

  def __init__(
      self,
      options,
      options_class_name,
      name,
      full_name,
      file,
      containing_type,
      serialized_start=None,
      serialized_end=None,
      serialized_options=None,
  ):
    """Constructor.

    Args:
      options: Protocol message options or None to use default message options.
      options_class_name (str): The class name of the above options.
      name (str): Name of this protocol message type.
      full_name (str): Fully-qualified name of this protocol message type, which
        will include protocol "package" name and the name of any enclosing
        types.
      file: Reference to the containing FileDescriptor.
      containing_type: if provided, this is a nested descriptor, with this
        descriptor as parent, otherwise None.
      serialized_start: The start index (inclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_end: The end index (exclusive) in block in the
        file.serialized_pb that describes this descriptor.
      serialized_options: Protocol message serialized options or None.
    """
    super(_NestedDescriptorBase, self).__init__(
        file, options, serialized_options, options_class_name
    )

    self.name = name
    # TODO: Add function to calculate full_name instead of having it in
    # memory?
    self.full_name = full_name
    self.containing_type = containing_type

    # Byte offsets into file.serialized_pb used by CopyToProto().
    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def CopyToProto(self, proto):
    """Copies this to the matching proto in descriptor_pb2.

    Args:
      proto: An empty proto instance from descriptor_pb2.

    Raises:
      Error: If self couldn't be serialized, due to too few constructor
        arguments.
    """
    if (
        self.file is not None
        and self._serialized_start is not None
        and self._serialized_end is not None
    ):
      # Re-parse the relevant slice of the file's serialized descriptor.
      proto.ParseFromString(
          self.file.serialized_pb[self._serialized_start : self._serialized_end]
      )
    else:
      raise Error('Descriptor does not contain serialization.')
318
+
319
+
320
class Descriptor(_NestedDescriptorBase):
  """Descriptor for a protocol message type.

  Attributes:
    name (str): Name of this protocol message type.
    full_name (str): Fully-qualified name of this protocol message type, which
      will include protocol "package" name and the name of any enclosing
      types.
    containing_type (Descriptor): Reference to the descriptor of the type
      containing us, or None if this is top-level.
    fields (list[FieldDescriptor]): Field descriptors for all fields in this
      type.
    fields_by_number (dict(int, FieldDescriptor)): Same
      :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
      "number" attribute in each FieldDescriptor.
    fields_by_name (dict(str, FieldDescriptor)): Same :class:`FieldDescriptor`
      objects as in :attr:`fields`, but indexed by "name" attribute in each
      :class:`FieldDescriptor`.
    nested_types (list[Descriptor]): Descriptor references for all protocol
      message types nested within this one.
    nested_types_by_name (dict(str, Descriptor)): Same Descriptor objects as
      in :attr:`nested_types`, but indexed by "name" attribute in each
      Descriptor.
    enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references for
      all enums contained within this type.
    enum_types_by_name (dict(str, EnumDescriptor)): Same
      :class:`EnumDescriptor` objects as in :attr:`enum_types`, but indexed by
      "name" attribute in each EnumDescriptor.
    enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping from
      enum value name to :class:`EnumValueDescriptor` for that value.
    extensions (list[FieldDescriptor]): All extensions defined directly within
      this message type (NOT within a nested type).
    extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
      objects as :attr:`extensions`, but indexed by "name" attribute of each
      FieldDescriptor.
    is_extendable (bool): Does this type define any extension ranges?
    oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
      in this message.
    oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
      :attr:`oneofs`, but indexed by "name" attribute.
    file (FileDescriptor): Reference to file descriptor.
    is_map_entry: If the message type is a map entry.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.Descriptor

    def __new__(
        cls,
        name=None,
        full_name=None,
        filename=None,
        containing_type=None,
        fields=None,
        nested_types=None,
        enum_types=None,
        extensions=None,
        options=None,
        serialized_options=None,
        is_extendable=True,
        extension_ranges=None,
        oneofs=None,
        file=None,  # pylint: disable=redefined-builtin
        serialized_start=None,
        serialized_end=None,
        syntax=None,
        is_map_entry=False,
        create_key=None,
    ):
      # With C descriptors, return the interned descriptor from the default
      # pool instead of constructing a Python object.
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindMessageTypeByName(full_name)

  # NOTE: The file argument redefining a builtin is nothing we can
  # fix right now since we don't know how many clients already rely on the
  # name of the argument.
  def __init__(
      self,
      name,
      full_name,
      filename,
      containing_type,
      fields,
      nested_types,
      enum_types,
      extensions,
      options=None,
      serialized_options=None,
      is_extendable=True,
      extension_ranges=None,
      oneofs=None,
      file=None,
      serialized_start=None,
      serialized_end=None,  # pylint: disable=redefined-builtin
      syntax=None,
      is_map_entry=False,
      create_key=None,
  ):
    """Arguments to __init__() are as described in the description

    of Descriptor fields above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    if create_key is not _internal_create_key:
      _Deprecated('create function Descriptor()')

    super(Descriptor, self).__init__(
        options,
        'MessageOptions',
        name,
        full_name,
        file,
        containing_type,
        serialized_start=serialized_start,
        serialized_end=serialized_end,
        serialized_options=serialized_options,
    )

    # We have fields in addition to fields_by_name and fields_by_number,
    # so that:
    # 1. Clients can index fields by "order in which they're listed."
    # 2. Clients can easily iterate over all fields with the terse
    #    syntax: for f in descriptor.fields: ...
    self.fields = fields
    for field in self.fields:
      field.containing_type = self
      field.file = file
    self.fields_by_number = dict((f.number, f) for f in fields)
    self.fields_by_name = dict((f.name, f) for f in fields)
    # Built lazily by the fields_by_camelcase_name property.
    self._fields_by_camelcase_name = None

    self.nested_types = nested_types
    for nested_type in nested_types:
      nested_type.containing_type = self
    self.nested_types_by_name = dict((t.name, t) for t in nested_types)

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      enum_type.containing_type = self
    self.enum_types_by_name = dict((t.name, t) for t in enum_types)
    self.enum_values_by_name = dict(
        (v.name, v) for t in enum_types for v in t.values
    )

    self.extensions = extensions
    for extension in self.extensions:
      extension.extension_scope = self
    self.extensions_by_name = dict((f.name, f) for f in extensions)
    self.is_extendable = is_extendable
    self.extension_ranges = extension_ranges
    self.oneofs = oneofs if oneofs is not None else []
    self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
    for oneof in self.oneofs:
      oneof.containing_type = self
      oneof.file = file
    self._is_map_entry = is_map_entry

  @property
  def _parent(self):
    # Feature resolution walks parents: containing message first, then file.
    return self.containing_type or self.file

  @property
  def fields_by_camelcase_name(self):
    """Same FieldDescriptor objects as in :attr:`fields`, but indexed by

    :attr:`FieldDescriptor.camelcase_name`.
    """
    if self._fields_by_camelcase_name is None:
      self._fields_by_camelcase_name = dict(
          (f.camelcase_name, f) for f in self.fields
      )
    return self._fields_by_camelcase_name

  def EnumValueName(self, enum, value):
    """Returns the string name of an enum value.

    This is just a small helper method to simplify a common operation.

    Args:
      enum: string name of the Enum.
      value: int, value of the enum.

    Returns:
      string name of the enum value.

    Raises:
      KeyError if either the Enum doesn't exist or the value is not a valid
        value for the enum.
    """
    return self.enum_types_by_name[enum].values_by_number[value].name

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.DescriptorProto.

    Args:
      proto: An empty descriptor_pb2.DescriptorProto.
    """
    # This function is overridden to give a better doc comment.
    super(Descriptor, self).CopyToProto(proto)
520
+
521
+
522
+ # TODO: We should have aggressive checking here,
523
+ # for example:
524
+ # * If you specify a repeated field, you should not be allowed
525
+ # to specify a default value.
526
+ # * [Other examples here as needed].
527
+ #
528
+ # TODO: for this and other *Descriptor classes, we
529
+ # might also want to lock things down aggressively (e.g.,
530
+ # prevent clients from setting the attributes). Having
531
+ # stronger invariants here in general will reduce the number
532
+ # of runtime checks we must do in reflection.py...
533
class FieldDescriptor(DescriptorBase):
  """Descriptor for a single field in a .proto file.

  Attributes:
    name (str): Name of this field, exactly as it appears in .proto.
    full_name (str): Name of this field, including containing scope. This is
      particularly relevant for extensions.
    index (int): Dense, 0-indexed index giving the order that this field
      textually appears within its message in the .proto file.
    number (int): Tag number declared for this field in the .proto file.
    type (int): (One of the TYPE_* constants below) Declared type.
    cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
      represent this field.
    label (int): (One of the LABEL_* constants below) Tells whether this field
      is optional, required, or repeated.
    has_default_value (bool): True if this field has a default value defined,
      otherwise false.
    default_value (Varies): Default value of this field. Only meaningful for
      non-repeated scalar fields. Repeated fields should always set this to [],
      and non-repeated composite fields should always set this to None.
    containing_type (Descriptor): Descriptor of the protocol message type that
      contains this field. Set by the Descriptor constructor if we're passed
      into one. Somewhat confusingly, for extension fields, this is the
      descriptor of the EXTENDED message, not the descriptor of the message
      containing this field. (See is_extension and extension_scope below).
    message_type (Descriptor): If a composite field, a descriptor of the message
      type contained in this field. Otherwise, this is None.
    enum_type (EnumDescriptor): If this field contains an enum, a descriptor of
      that enum. Otherwise, this is None.
    is_extension: True iff this describes an extension field.
    extension_scope (Descriptor): Only meaningful if is_extension is True. Gives
      the message that immediately contains this extension field. Will be None
      iff we're a top-level (file-level) extension field.
    options (descriptor_pb2.FieldOptions): Protocol message field options or
      None to use default field options.
    containing_oneof (OneofDescriptor): If the field is a member of a oneof
      union, contains its descriptor. Otherwise, None.
    file (FileDescriptor): Reference to file descriptor.
  """

  # Must be consistent with C++ FieldDescriptor::Type enum in
  # descriptor.h.
  #
  # TODO: Find a way to eliminate this repetition.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # Must be consistent with C++ FieldDescriptor::CppType enum in
  # descriptor.h.
  #
  # TODO: Find a way to eliminate this repetition.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  # Maps each wire/declared TYPE_* to the CPPTYPE_* used to represent it.
  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_SINT64: CPPTYPE_INT64,
      TYPE_SFIXED64: CPPTYPE_INT64,
      TYPE_UINT64: CPPTYPE_UINT64,
      TYPE_FIXED64: CPPTYPE_UINT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_SFIXED32: CPPTYPE_INT32,
      TYPE_SINT32: CPPTYPE_INT32,
      TYPE_UINT32: CPPTYPE_UINT32,
      TYPE_FIXED32: CPPTYPE_UINT32,
      TYPE_BYTES: CPPTYPE_STRING,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE,
      TYPE_GROUP: CPPTYPE_MESSAGE,
  }

  # Must be consistent with C++ FieldDescriptor::Label enum in
  # descriptor.h.
  #
  # TODO: Find a way to eliminate this repetition.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
  # and kLastReservedNumber in descriptor.h
  MAX_FIELD_NUMBER = (1 << 29) - 1
  FIRST_RESERVED_FIELD_NUMBER = 19000
  LAST_RESERVED_FIELD_NUMBER = 19999

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FieldDescriptor

    def __new__(
        cls,
        name,
        full_name,
        index,
        number,
        type,
        cpp_type,
        label,
        default_value,
        message_type,
        enum_type,
        containing_type,
        is_extension,
        extension_scope,
        options=None,
        serialized_options=None,
        has_default_value=True,
        containing_oneof=None,
        json_name=None,
        file=None,
        create_key=None,
    ):  # pylint: disable=redefined-builtin
      # With C descriptors, look up the already-built descriptor in the
      # default pool instead of constructing a Python object.
      _message.Message._CheckCalledFromGeneratedFile()
      if is_extension:
        return _message.default_pool.FindExtensionByName(full_name)
      else:
        return _message.default_pool.FindFieldByName(full_name)

  def __init__(
      self,
      name,
      full_name,
      index,
      number,
      type,
      cpp_type,
      label,
      default_value,
      message_type,
      enum_type,
      containing_type,
      is_extension,
      extension_scope,
      options=None,
      serialized_options=None,
      has_default_value=True,
      containing_oneof=None,
      json_name=None,
      file=None,
      create_key=None,
  ):  # pylint: disable=redefined-builtin
    """The arguments are as described in the description of FieldDescriptor

    attributes above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    if create_key is not _internal_create_key:
      _Deprecated('create function FieldDescriptor()')

    super(FieldDescriptor, self).__init__(
        file, options, serialized_options, 'FieldOptions'
    )
    self.name = name
    self.full_name = full_name
    # Computed lazily by the camelcase_name property.
    self._camelcase_name = None
    if json_name is None:
      self.json_name = _ToJsonName(name)
    else:
      self.json_name = json_name
    self.index = index
    self.number = number
    # Raw declared type/label; the public `type` and `label` properties may
    # adjust these based on resolved edition features.
    self._type = type
    self.cpp_type = cpp_type
    self._label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    self.containing_oneof = containing_oneof
    if api_implementation.Type() == 'python':
      self._cdescriptor = None
    else:
      # Under the C/upb implementation, keep a handle to the corresponding
      # C-level descriptor from the default pool.
      if is_extension:
        self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
      else:
        self._cdescriptor = _message.default_pool.FindFieldByName(full_name)

  @property
  def _parent(self):
    # Feature resolution order: oneof, then extension scope/file, then
    # containing message.
    if self.containing_oneof:
      return self.containing_oneof
    if self.is_extension:
      return self.extension_scope or self.file
    return self.containing_type

  def _InferLegacyFeatures(self, edition, options, features):
    """Infers edition features from pre-editions (proto2/proto3) syntax."""
    # pylint: disable=g-import-not-at-top
    from google.protobuf import descriptor_pb2

    if edition >= descriptor_pb2.Edition.EDITION_2023:
      # Editions files carry explicit features; nothing to infer.
      return

    if self._label == FieldDescriptor.LABEL_REQUIRED:
      features.field_presence = (
          descriptor_pb2.FeatureSet.FieldPresence.LEGACY_REQUIRED
      )

    if self._type == FieldDescriptor.TYPE_GROUP:
      features.message_encoding = (
          descriptor_pb2.FeatureSet.MessageEncoding.DELIMITED
      )

    if options.HasField('packed'):
      features.repeated_field_encoding = (
          descriptor_pb2.FeatureSet.RepeatedFieldEncoding.PACKED
          if options.packed
          else descriptor_pb2.FeatureSet.RepeatedFieldEncoding.EXPANDED
      )

  @property
  def type(self):
    # Delimited message encoding maps back to the legacy GROUP type, except
    # for map entries, which are always length-prefixed.
    if (
        self._GetFeatures().message_encoding
        == _FEATURESET_MESSAGE_ENCODING_DELIMITED
        and self.message_type
        and not self.message_type.GetOptions().map_entry
        and not self.containing_type.GetOptions().map_entry
    ):
      return FieldDescriptor.TYPE_GROUP
    return self._type

  @type.setter
  def type(self, val):
    self._type = val

  @property
  def label(self):
    _Deprecated('label property', 'is_required or is_repeated properties')

    # LEGACY_REQUIRED presence is surfaced as the old LABEL_REQUIRED.
    if (
        self._GetFeatures().field_presence
        == _FEATURESET_FIELD_PRESENCE_LEGACY_REQUIRED
    ):
      return FieldDescriptor.LABEL_REQUIRED
    return self._label

  @property
  def is_required(self):
    """Returns if the field is required."""
    return (
        self._GetFeatures().field_presence
        == _FEATURESET_FIELD_PRESENCE_LEGACY_REQUIRED
    )

  @property
  def is_repeated(self):
    """Returns if the field is repeated."""
    return self._label == FieldDescriptor.LABEL_REPEATED

  @property
  def camelcase_name(self):
    """Camelcase name of this field.

    Returns:
      str: the name in CamelCase.
    """
    if self._camelcase_name is None:
      self._camelcase_name = _ToCamelCase(self.name)
    return self._camelcase_name

  @property
  def has_presence(self):
    """Whether the field distinguishes between unpopulated and default values.

    Raises:
      RuntimeError: singular field that is not linked with message nor file.
    """
    if self.is_repeated:
      return False
    # Message fields, extensions, and oneof members always track presence.
    if (
        self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE
        or self.is_extension
        or self.containing_oneof
    ):
      return True

    return (
        self._GetFeatures().field_presence
        != _FEATURESET_FIELD_PRESENCE_IMPLICIT
    )

  @property
  def is_packed(self):
    """Returns if the field is packed."""
    if not self.is_repeated:
      return False
    field_type = self.type
    # Only primitive numeric types can use packed encoding.
    if (
        field_type == FieldDescriptor.TYPE_STRING
        or field_type == FieldDescriptor.TYPE_GROUP
        or field_type == FieldDescriptor.TYPE_MESSAGE
        or field_type == FieldDescriptor.TYPE_BYTES
    ):
      return False

    return (
        self._GetFeatures().repeated_field_encoding
        == _FEATURESET_REPEATED_FIELD_ENCODING_PACKED
    )

  @staticmethod
  def ProtoTypeToCppProtoType(proto_type):
    """Converts from a Python proto type to a C++ Proto Type.

    The Python ProtocolBuffer classes specify both the 'Python' datatype and the
    'C++' datatype - and they're not the same. This helper method should
    translate from one to another.

    Args:
      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)

    Returns:
      int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
    Raises:
      TypeTransformationError: when the Python proto type isn't known.
    """
    try:
      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
    except KeyError:
      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
889
+
890
+
891
class EnumDescriptor(_NestedDescriptorBase):
  """Descriptor for an enum defined in a .proto file.

  Attributes:
    name (str): Name of the enum type.
    full_name (str): Full name of the type, including package name and any
      enclosing type(s).
    values (list[EnumValueDescriptor]): List of the values in this enum.
    values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`, but
      indexed by the "name" field of each EnumValueDescriptor.
    values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
      but indexed by the "number" field of each EnumValueDescriptor.
    containing_type (Descriptor): Descriptor of the immediate containing type of
      this enum, or None if this is an enum defined at the top level in a .proto
      file. Set by Descriptor's constructor if we're passed into one.
    file (FileDescriptor): Reference to file descriptor.
    options (descriptor_pb2.EnumOptions): Enum options message or None to use
      default enum options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumDescriptor

    def __new__(
        cls,
        name,
        full_name,
        filename,
        values,
        containing_type=None,
        options=None,
        serialized_options=None,
        file=None,  # pylint: disable=redefined-builtin
        serialized_start=None,
        serialized_end=None,
        create_key=None,
    ):
      # With C descriptors, return the pooled C-level descriptor.
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindEnumTypeByName(full_name)

  def __init__(
      self,
      name,
      full_name,
      filename,
      values,
      containing_type=None,
      options=None,
      serialized_options=None,
      file=None,  # pylint: disable=redefined-builtin
      serialized_start=None,
      serialized_end=None,
      create_key=None,
  ):
    """Arguments are as described in the attribute description above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    if create_key is not _internal_create_key:
      _Deprecated('create function EnumDescriptor()')

    super(EnumDescriptor, self).__init__(
        options,
        'EnumOptions',
        name,
        full_name,
        file,
        containing_type,
        serialized_start=serialized_start,
        serialized_end=serialized_end,
        serialized_options=serialized_options,
    )

    self.values = values
    for value in self.values:
      value.file = file
      value.type = self
    self.values_by_name = dict((v.name, v) for v in values)
    # Values are reversed to ensure that the first alias is retained.
    self.values_by_number = dict((v.number, v) for v in reversed(values))

  @property
  def _parent(self):
    # Feature resolution walks parents: containing message first, then file.
    return self.containing_type or self.file

  @property
  def is_closed(self):
    """Returns true whether this is a "closed" enum.

    This means that it:
    - Has a fixed set of values, rather than being equivalent to an int32.
    - Encountering values not in this set causes them to be treated as unknown
      fields.
    - The first value (i.e., the default) may be nonzero.

    WARNING: Some runtimes currently have a quirk where non-closed enums are
    treated as closed when used as the type of fields defined in a
    `syntax = proto2;` file. This quirk is not present in all runtimes; as of
    writing, we know that:

    - C++, Java, and C++-based Python share this quirk.
    - UPB and UPB-based Python do not.
    - PHP and Ruby treat all enums as open regardless of declaration.

    Care should be taken when using this function to respect the target
    runtime's enum handling quirks.
    """
    return self._GetFeatures().enum_type == _FEATURESET_ENUM_TYPE_CLOSED

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.EnumDescriptorProto.

    Args:
      proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
    """
    # This function is overridden to give a better doc comment.
    super(EnumDescriptor, self).CopyToProto(proto)
1009
+
1010
+
1011
class EnumValueDescriptor(DescriptorBase):
  """Descriptor for a single value within an enum.

  Attributes:
    name (str): Name of this value.
    index (int): Dense, 0-indexed index giving the order that this value appears
      textually within its enum in the .proto file.
    number (int): Actual number assigned to this enum value.
    type (EnumDescriptor): :class:`EnumDescriptor` to which this value belongs.
      Set by :class:`EnumDescriptor`'s constructor if we're passed into one.
    options (descriptor_pb2.EnumValueOptions): Enum value options message or
      None to use default enum value options options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor

    def __new__(
        cls,
        name,
        index,
        number,
        type=None,  # pylint: disable=redefined-builtin
        options=None,
        serialized_options=None,
        create_key=None,
    ):
      _message.Message._CheckCalledFromGeneratedFile()
      # There is no way we can build a complete EnumValueDescriptor with the
      # given parameters (the name of the Enum is not known, for example).
      # Fortunately generated files just pass it to the EnumDescriptor()
      # constructor, which will ignore it, so returning None is good enough.
      return None

  def __init__(
      self,
      name,
      index,
      number,
      type=None,  # pylint: disable=redefined-builtin
      options=None,
      serialized_options=None,
      create_key=None,
  ):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('create function EnumValueDescriptor()')

    super(EnumValueDescriptor, self).__init__(
        type.file if type else None,
        options,
        serialized_options,
        'EnumValueOptions',
    )
    self.name = name
    self.index = index
    self.number = number
    self.type = type

  @property
  def _parent(self):
    # Features are resolved against the owning enum.
    return self.type
1073
+
1074
+
1075
class OneofDescriptor(DescriptorBase):
  """Descriptor for a oneof field.

  Attributes:
    name (str): Name of the oneof field.
    full_name (str): Full name of the oneof field, including package name.
    index (int): 0-based index giving the order of the oneof field inside its
      containing type.
    containing_type (Descriptor): :class:`Descriptor` of the protocol message
      type that contains this field. Set by the :class:`Descriptor` constructor
      if we're passed into one.
    fields (list[FieldDescriptor]): The list of field descriptors this oneof can
      contain.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor

    def __new__(
        cls,
        name,
        full_name,
        index,
        containing_type,
        fields,
        options=None,
        serialized_options=None,
        create_key=None,
    ):
      # With C descriptors, return the pooled C-level descriptor.
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindOneofByName(full_name)

  def __init__(
      self,
      name,
      full_name,
      index,
      containing_type,
      fields,
      options=None,
      serialized_options=None,
      create_key=None,
  ):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('create function OneofDescriptor()')

    super(OneofDescriptor, self).__init__(
        containing_type.file if containing_type else None,
        options,
        serialized_options,
        'OneofOptions',
    )
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_type = containing_type
    self.fields = fields

  @property
  def _parent(self):
    # Features are resolved against the containing message.
    return self.containing_type
1137
+
1138
+
1139
class ServiceDescriptor(_NestedDescriptorBase):
  """Descriptor for a service.

  Attributes:
    name (str): Name of the service.
    full_name (str): Full name of the service, including package name.
    index (int): 0-indexed index giving the order that this services definition
      appears within the .proto file.
    methods (list[MethodDescriptor]): List of methods provided by this service.
    methods_by_name (dict(str, MethodDescriptor)): Same
      :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but
      indexed by "name" attribute in each :class:`MethodDescriptor`.
    options (descriptor_pb2.ServiceOptions): Service options message or None to
      use default service options.
    file (FileDescriptor): Reference to file info.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor

    def __new__(
        cls,
        name=None,
        full_name=None,
        index=None,
        methods=None,
        options=None,
        serialized_options=None,
        file=None,  # pylint: disable=redefined-builtin
        serialized_start=None,
        serialized_end=None,
        create_key=None,
    ):
      # With C descriptors, return the pooled C-level descriptor.
      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
      return _message.default_pool.FindServiceByName(full_name)

  def __init__(
      self,
      name,
      full_name,
      index,
      methods,
      options=None,
      serialized_options=None,
      file=None,  # pylint: disable=redefined-builtin
      serialized_start=None,
      serialized_end=None,
      create_key=None,
  ):
    """Arguments are as described in the attribute description above."""
    if create_key is not _internal_create_key:
      _Deprecated('create function ServiceDescriptor()')

    super(ServiceDescriptor, self).__init__(
        options,
        'ServiceOptions',
        name,
        full_name,
        file,
        None,
        serialized_start=serialized_start,
        serialized_end=serialized_end,
        serialized_options=serialized_options,
    )
    self.index = index
    self.methods = methods
    self.methods_by_name = dict((m.name, m) for m in methods)
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.file = self.file
      method.containing_service = self

  @property
  def _parent(self):
    # Services are file-level; features resolve against the file.
    return self.file

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor.

    Args:
      name (str): Name of the method.

    Returns:
      MethodDescriptor: The descriptor for the requested method.

    Raises:
      KeyError: if the method cannot be found in the service.
    """
    return self.methods_by_name[name]

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
    """
    # This function is overridden to give a better doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)
1236
+
1237
+
1238
class MethodDescriptor(DescriptorBase):
  """Descriptor for a method in a service.

  Attributes:
    name (str): Name of the method within the service.
    full_name (str): Full name of method.
    index (int): 0-indexed index of the method inside the service.
    containing_service (ServiceDescriptor): The service that contains this
      method.
    input_type (Descriptor): The descriptor of the message that this method
      accepts.
    output_type (Descriptor): The descriptor of the message that this method
      returns.
    client_streaming (bool): Whether this method uses client streaming.
    server_streaming (bool): Whether this method uses server streaming.
    options (descriptor_pb2.MethodOptions or None): Method options message, or
      None to use default method options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.MethodDescriptor

    def __new__(
        cls,
        name,
        full_name,
        index,
        containing_service,
        input_type,
        output_type,
        client_streaming=False,
        server_streaming=False,
        options=None,
        serialized_options=None,
        create_key=None,
    ):
      # With C++-backed descriptors, generated-code construction resolves to
      # the method descriptor already present in the default pool.
      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
      return _message.default_pool.FindMethodByName(full_name)

  def __init__(
      self,
      name,
      full_name,
      index,
      containing_service,
      input_type,
      output_type,
      client_streaming=False,
      server_streaming=False,
      options=None,
      serialized_options=None,
      create_key=None,
  ):
    """The arguments are as described in the description of MethodDescriptor

    attributes above.

    Note that containing_service may be None, and may be set later if necessary.
    """
    # Direct construction outside generated code is deprecated.
    if create_key is not _internal_create_key:
      _Deprecated('create function MethodDescriptor()')

    # The file is derived from the containing service when one is given;
    # otherwise it is left unset (None) until the service is attached.
    super(MethodDescriptor, self).__init__(
        containing_service.file if containing_service else None,
        options,
        serialized_options,
        'MethodOptions',
    )
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_service = containing_service
    self.input_type = input_type
    self.output_type = output_type
    self.client_streaming = client_streaming
    self.server_streaming = server_streaming

  @property
  def _parent(self):
    return self.containing_service

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.MethodDescriptorProto.

    Args:
      proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.

    Raises:
      Error: If self couldn't be serialized, due to too few constructor
        arguments.
    """
    if self.containing_service is not None:
      from google.protobuf import descriptor_pb2

      # Serialize the whole containing service, then pull out this method's
      # entry by its index.
      service_proto = descriptor_pb2.ServiceDescriptorProto()
      self.containing_service.CopyToProto(service_proto)
      proto.CopyFrom(service_proto.method[self.index])
    else:
      raise Error('Descriptor does not contain a service.')
1337
+
1338
+
1339
class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
  :attr:`dependencies` fields are only set by the
  :py:mod:`google.protobuf.message_factory` module, and not by the generated
  proto code.

  Attributes:
    name (str): Name of file, relative to root of source tree.
    package (str): Name of the package
    edition (Edition): Enum value indicating edition of the file
    serialized_pb (bytes): Byte string of serialized
      :class:`descriptor_pb2.FileDescriptorProto`.
    dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
      objects this :class:`FileDescriptor` depends on.
    public_dependencies (list[FileDescriptor]): A subset of
      :attr:`dependencies`, which were declared as "public".
    message_types_by_name (dict(str, Descriptor)): Mapping from message names to
      their :class:`Descriptor`.
    enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
      their :class:`EnumDescriptor`.
    extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
      names declared at file scope to their :class:`FieldDescriptor`.
    services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
      names to their :class:`ServiceDescriptor`.
    pool (DescriptorPool): The pool this descriptor belongs to. When not passed
      to the constructor, the global default pool is used.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FileDescriptor

    def __new__(
        cls,
        name,
        package,
        options=None,
        serialized_options=None,
        serialized_pb=None,
        dependencies=None,
        public_dependencies=None,
        syntax=None,
        edition=None,
        pool=None,
        create_key=None,
    ):
      # FileDescriptor() is called from various places, not only from generated
      # files, to register dynamic proto files and messages.
      # pylint: disable=g-explicit-bool-comparison
      if serialized_pb:
        # Non-empty serialized bytes: register through the C++ default pool
        # and return the pool's descriptor instead of a fresh Python object.
        return _message.default_pool.AddSerializedFile(serialized_pb)
      else:
        return super(FileDescriptor, cls).__new__(cls)

  def __init__(
      self,
      name,
      package,
      options=None,
      serialized_options=None,
      serialized_pb=None,
      dependencies=None,
      public_dependencies=None,
      syntax=None,
      edition=None,
      pool=None,
      create_key=None,
  ):
    """Constructor."""
    # Direct construction outside generated code is deprecated.
    if create_key is not _internal_create_key:
      _Deprecated('create function FileDescriptor()')

    # A FileDescriptor is its own "file", hence `self` as the first argument.
    super(FileDescriptor, self).__init__(
        self, options, serialized_options, 'FileOptions'
    )

    # Normalize syntax/edition into a single internal edition string.
    # An explicit known edition wins; otherwise it is derived from syntax,
    # defaulting to proto2 semantics.
    if edition and edition != 'EDITION_UNKNOWN':
      self._edition = edition
    elif syntax == 'proto3':
      self._edition = 'EDITION_PROTO3'
    else:
      self._edition = 'EDITION_PROTO2'

    if pool is None:
      # Imported lazily to avoid a module-level import cycle with
      # descriptor_pool, which imports this module.
      from google.protobuf import descriptor_pool

      pool = descriptor_pool.Default()
    self.pool = pool
    self.message_types_by_name = {}
    self.name = name
    self.package = package
    self.serialized_pb = serialized_pb

    # These indexes are populated externally (see class docstring).
    self.enum_types_by_name = {}
    self.extensions_by_name = {}
    self.services_by_name = {}
    self.dependencies = dependencies or []
    self.public_dependencies = public_dependencies or []

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    # Re-parses the stored serialized form rather than rebuilding field by
    # field; requires serialized_pb to have been provided.
    proto.ParseFromString(self.serialized_pb)

  @property
  def _parent(self):
    # Files are the roots of the descriptor tree.
    return None
1450
+
1451
+
1452
def _ParseOptions(message, string):
  """Parses serialized options.

  This helper function is used to parse serialized options in generated
  proto2 files. It must not be used outside proto2.

  Args:
    message: An empty options message instance to parse into.
    string: Serialized bytes of the options message.

  Returns:
    The same `message` instance, populated from `string`.
  """
  message.ParseFromString(string)
  return message
1460
+
1461
+
1462
+ def _ToCamelCase(name):
1463
+ """Converts name to camel-case and returns it."""
1464
+ capitalize_next = False
1465
+ result = []
1466
+
1467
+ for c in name:
1468
+ if c == '_':
1469
+ if result:
1470
+ capitalize_next = True
1471
+ elif capitalize_next:
1472
+ result.append(c.upper())
1473
+ capitalize_next = False
1474
+ else:
1475
+ result += c
1476
+
1477
+ # Lower-case the first letter.
1478
+ if result and result[0].isupper():
1479
+ result[0] = result[0].lower()
1480
+ return ''.join(result)
1481
+
1482
+
1483
+ def _OptionsOrNone(descriptor_proto):
1484
+ """Returns the value of the field `options`, or None if it is not set."""
1485
+ if descriptor_proto.HasField('options'):
1486
+ return descriptor_proto.options
1487
+ else:
1488
+ return None
1489
+
1490
+
1491
+ def _ToJsonName(name):
1492
+ """Converts name to Json name and returns it."""
1493
+ capitalize_next = False
1494
+ result = []
1495
+
1496
+ for c in name:
1497
+ if c == '_':
1498
+ capitalize_next = True
1499
+ elif capitalize_next:
1500
+ result.append(c.upper())
1501
+ capitalize_next = False
1502
+ else:
1503
+ result += c
1504
+
1505
+ return ''.join(result)
1506
+
1507
+
1508
def MakeDescriptor(
    desc_proto,
    package='',
    build_file_if_cpp=True,
    syntax=None,
    edition=None,
    file_desc=None,
):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches. Set to
      False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used. Set to "proto3" to get
      proto3 field presence semantics.
    edition: The edition that should be used if syntax is "edition".
    file_desc: A FileDescriptor to place this descriptor into.

  Returns:
    A Descriptor for protobuf messages.
  """
  # pylint: disable=g-import-not-at-top
  from google.protobuf import descriptor_pb2

  # Generate a random name for this proto file to prevent conflicts with any
  # imported ones. We need to specify a file name so the descriptor pool
  # accepts our FileDescriptorProto, but it is not important what that file
  # name is actually set to.
  proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')

  if package:
    file_name = os.path.join(package.replace('.', '/'), proto_name + '.proto')
  else:
    file_name = proto_name + '.proto'

  if api_implementation.Type() != 'python' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    if package:
      file_descriptor_proto.package = package
    file_descriptor_proto.name = file_name

    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      # With C descriptors the pool's message type is the final answer; the
      # pure-Python construction below is skipped entirely.
      return result.message_types_by_name[desc_proto.name]

  if file_desc is None:
    # Synthesize a FileDescriptor to own the new message; serialized_pb is a
    # placeholder since no serialized bytes exist for this synthetic file.
    file_desc = FileDescriptor(
        pool=None,
        name=file_name,
        package=package,
        syntax=syntax,
        edition=edition,
        options=None,
        serialized_pb='',
        dependencies=[],
        public_dependencies=[],
        create_key=_internal_create_key,
    )
  full_message_name = [desc_proto.name]
  if package:
    full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name,
        full_name,
        None,
        [
            EnumValueDescriptor(
                enum_val.name,
                ii,
                enum_val.number,
                create_key=_internal_create_key,
            )
            for ii, enum_val in enumerate(enum_proto.value)
        ],
        file=file_desc,
        create_key=_internal_create_key,
    )
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(
        nested_proto,
        package='.'.join(full_message_name),
        build_file_if_cpp=False,
        syntax=syntax,
        edition=edition,
        file_desc=file_desc,
    )
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.json_name:
      json_name = field_proto.json_name
    else:
      json_name = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      # Resolve only against locally defined types: take the last path
      # component of type_name and look it up in this message's scope.
      full_type_name = '.'.join(
          full_message_name + [type_name[type_name.rfind('.') + 1 :]]
      )
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    field = FieldDescriptor(
        field_proto.name,
        full_name,
        # NOTE(review): uses number-1 as the field's index; this assumes
        # consecutive field numbers starting at 1 — confirm for sparse protos.
        field_proto.number - 1,
        field_proto.number,
        field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label,
        None,
        nested_desc,
        enum_desc,
        None,
        False,
        None,
        options=_OptionsOrNone(field_proto),
        has_default_value=False,
        json_name=json_name,
        file=file_desc,
        create_key=_internal_create_key,
    )
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(
      desc_proto.name,
      desc_name,
      None,
      None,
      fields,
      list(nested_types.values()),
      list(enum_types.values()),
      [],
      options=_OptionsOrNone(desc_proto),
      file=file_desc,
      create_key=_internal_create_key,
  )
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_database.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides a container for DescriptorProtos."""
9
+
10
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
11
+
12
+ import warnings
13
+
14
+
15
class Error(Exception):
  """Base exception type for errors raised by this module."""
  pass
17
+
18
+
19
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor.

  Raised by DescriptorDatabase.Add when the same file name is registered
  twice with unequal FileDescriptorProto contents.
  """
21
+
22
+
23
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # Both indexes map to the owning FileDescriptorProto:
    #   file name -> proto, and fully qualified symbol name -> proto.
    self._file_desc_protos_by_file = {}
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    if proto_name not in self._file_desc_protos_by_file:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)
    else:
      # Re-adding an identical proto is a no-op.
      return

    # Add all the top-level descriptors to the index.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for name in _ExtractSymbols(message, package):
        self._AddSymbol(name, file_desc_proto)
    for enum in file_desc_proto.enum_type:
      self._AddSymbol(
          ('.'.join((package, enum.name)) if package else enum.name),
          file_desc_proto,
      )
      # Enum values are indexed as siblings of the enum type (package-scoped,
      # not nested under the enum name). They are written directly into the
      # symbol dict, bypassing _AddSymbol's conflict warning.
      for enum_value in enum.value:
        self._file_desc_protos_by_symbol[
            '.'.join((package, enum_value.name)) if package else enum_value.name
        ] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._AddSymbol(
          ('.'.join((package, extension.name)) if package else extension.name),
          file_desc_proto,
      )
    for service in file_desc_proto.service:
      self._AddSymbol(
          ('.'.join((package, service.name)) if package else service.name),
          file_desc_proto,
      )

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'
    'some.package.name.Message.some_field'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    # Tolerate a single leading "." on an otherwise dot-free name (e.g.
    # ".Message" in the root namespace), but warn that this is deprecated.
    if symbol.count('.') == 1 and symbol[0] == '.':
      symbol = symbol.lstrip('.')
      warnings.warn(
          'Please remove the leading "." when '
          'FindFileContainingSymbol, this will turn to error '
          'in 2026 Jan.',
          RuntimeWarning,
      )
    try:
      return self._file_desc_protos_by_symbol[symbol]
    except KeyError:
      # Fields, enum values, and nested extensions are not in
      # _file_desc_protos_by_symbol. Try to find the top level
      # descriptor. Non-existent nested symbol under a valid top level
      # descriptor can also be found. The behavior is the same with
      # protobuf C++.
      top_level, _, _ = symbol.rpartition('.')
      try:
        return self._file_desc_protos_by_symbol[top_level]
      except KeyError:
        # Raise the original symbol as a KeyError for better diagnostics.
        raise KeyError(symbol)

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO: implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO: implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    # Warn (but still overwrite) when the same symbol was already registered
    # by a different file.
    if name in self._file_desc_protos_by_symbol:
      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
                  '": ' + name +
                  ' is already defined in file "' +
                  self._file_desc_protos_by_symbol[name].name + '"')
      warnings.warn(warn_msg, RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto
154
+
155
+
156
+ def _ExtractSymbols(desc_proto, package):
157
+ """Pulls out all the symbols from a descriptor proto.
158
+
159
+ Args:
160
+ desc_proto: The proto to extract symbols from.
161
+ package: The package containing the descriptor type.
162
+
163
+ Yields:
164
+ The fully qualified name found in the descriptor.
165
+ """
166
+ message_name = package + '.' + desc_proto.name if package else desc_proto.name
167
+ yield message_name
168
+ for nested_type in desc_proto.nested_type:
169
+ for symbol in _ExtractSymbols(nested_type, message_name):
170
+ yield symbol
171
+ for enum_type in desc_proto.enum_type:
172
+ yield '.'.join((message_name, enum_type.name))
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pb2.py ADDED
The diff for this file is too large to render. See raw diff
 
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/descriptor_pool.py ADDED
@@ -0,0 +1,1370 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides DescriptorPool to use as a container for proto2 descriptors.
9
+
10
+ The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
11
+ a collection of protocol buffer descriptors for use when dynamically creating
12
+ message types at runtime.
13
+
14
+ For most applications protocol buffers should be used via modules generated by
15
+ the protocol buffer compiler tool. This should only be used when the type of
16
+ protocol buffers used in an application or library cannot be predetermined.
17
+
18
+ Below is a straightforward example on how to use this class::
19
+
20
+ pool = DescriptorPool()
21
+ file_descriptor_protos = [ ... ]
22
+ for file_descriptor_proto in file_descriptor_protos:
23
+ pool.Add(file_descriptor_proto)
24
+ my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
25
+
26
+ The message descriptor can be used in conjunction with the message_factory
27
+ module in order to create a protocol buffer class that can be encoded and
28
+ decoded.
29
+
30
+ If you want to get a Python class for the specified proto, use the
31
+ helper functions inside google.protobuf.message_factory
32
+ directly instead of this class.
33
+ """
34
+
35
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
36
+
37
+ import collections
38
+ import threading
39
+ import warnings
40
+
41
+ from google.protobuf import descriptor
42
+ from google.protobuf import descriptor_database
43
+ from google.protobuf import text_encoding
44
+ from google.protobuf.internal import python_edition_defaults
45
+ from google.protobuf.internal import python_message
46
+
47
+ _USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
48
+
49
+
50
+ def _NormalizeFullyQualifiedName(name):
51
+ """Remove leading period from fully-qualified type name.
52
+
53
+ Due to b/13860351 in descriptor_database.py, types in the root namespace are
54
+ generated with a leading period. This function removes that prefix.
55
+
56
+ Args:
57
+ name (str): The fully-qualified symbol name.
58
+
59
+ Returns:
60
+ str: The normalized fully-qualified symbol name.
61
+ """
62
+ return name.lstrip('.')
63
+
64
+
65
+ def _OptionsOrNone(descriptor_proto):
66
+ """Returns the value of the field `options`, or None if it is not set."""
67
+ if descriptor_proto.HasField('options'):
68
+ return descriptor_proto.options
69
+ else:
70
+ return None
71
+
72
+
73
def _IsMessageSetExtension(field):
  """Returns True iff `field` is a singular message-typed extension of a
  message whose options enable message_set_wire_format."""
  if not field.is_extension:
    return False
  extendee = field.containing_type
  if not (extendee.has_options
          and extendee.GetOptions().message_set_wire_format):
    return False
  return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE
          and not field.is_required
          and not field.is_repeated)
80
+
81
# Module-level lock; presumably serializes lazy initialization of the edition
# defaults used by DescriptorPool — usage is outside this chunk, confirm.
_edition_defaults_lock = threading.Lock()
82
+
83
+
84
+ class DescriptorPool(object):
85
+ """A collection of protobufs dynamically constructed by descriptor protos."""
86
+
87
+ if _USE_C_DESCRIPTORS:
88
+
89
+ def __new__(cls, descriptor_db=None):
90
+ # pylint: disable=protected-access
91
+ return descriptor._message.DescriptorPool(descriptor_db)
92
+
93
+ def __init__(
94
+ self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts=False
95
+ ):
96
+ """Initializes a Pool of proto buffs.
97
+
98
+ The descriptor_db argument to the constructor is provided to allow
99
+ specialized file descriptor proto lookup code to be triggered on demand. An
100
+ example would be an implementation which will read and compile a file
101
+ specified in a call to FindFileByName() and not require the call to Add()
102
+ at all. Results from this database will be cached internally here as well.
103
+
104
+ Args:
105
+ descriptor_db: A secondary source of file descriptors.
106
+ use_deprecated_legacy_json_field_conflicts: Unused, for compatibility with
107
+ C++.
108
+ """
109
+
110
+ self._internal_db = descriptor_database.DescriptorDatabase()
111
+ self._descriptor_db = descriptor_db
112
+ self._descriptors = {}
113
+ self._enum_descriptors = {}
114
+ self._service_descriptors = {}
115
+ self._file_descriptors = {}
116
+ self._toplevel_extensions = {}
117
+ self._top_enum_values = {}
118
+ # We store extensions in two two-level mappings: The first key is the
119
+ # descriptor of the message being extended, the second key is the extension
120
+ # full name or its tag number.
121
+ self._extensions_by_name = collections.defaultdict(dict)
122
+ self._extensions_by_number = collections.defaultdict(dict)
123
+ self._serialized_edition_defaults = (
124
+ python_edition_defaults._PROTOBUF_INTERNAL_PYTHON_EDITION_DEFAULTS
125
+ )
126
+ self._edition_defaults = None
127
+ self._feature_cache = dict()
128
+
129
  def _CheckConflictRegister(self, desc, desc_name, file_name):
    """Check if the descriptor name conflicts with another of the same name.

    Args:
      desc: Descriptor of a message, enum, service, extension or enum value.
      desc_name (str): the full name of desc.
      file_name (str): The file name of descriptor.

    Raises:
      TypeError: if `desc_name` is already registered for a different kind of
        descriptor, or was registered from a different file.
    """
    # A full name must be unique across ALL descriptor kinds, so every
    # registry is scanned, not just the one matching desc's own type.
    for register, descriptor_type in [
        (self._descriptors, descriptor.Descriptor),
        (self._enum_descriptors, descriptor.EnumDescriptor),
        (self._service_descriptors, descriptor.ServiceDescriptor),
        (self._toplevel_extensions, descriptor.FieldDescriptor),
        (self._top_enum_values, descriptor.EnumValueDescriptor)]:
      if desc_name in register:
        old_desc = register[desc_name]
        # Enum values have no direct .file; reach it through their enum type.
        if isinstance(old_desc, descriptor.EnumValueDescriptor):
          old_file = old_desc.type.file.name
        else:
          old_file = old_desc.file.name

        # Re-registering a descriptor of the same kind from the same file is
        # tolerated (e.g. the file is added twice); anything else conflicts.
        if not isinstance(desc, descriptor_type) or (
            old_file != file_name):
          error_msg = ('Conflict register for file "' + file_name +
                       '": ' + desc_name +
                       ' is already defined in file "' +
                       old_file + '". Please fix the conflict by adding '
                       'package name on the proto file, or use different '
                       'name for the duplication.')
          if isinstance(desc, descriptor.EnumValueDescriptor):
            error_msg += ('\nNote: enum values appear as '
                          'siblings of the enum type instead of '
                          'children of it.')

          raise TypeError(error_msg)

    return
166
+
167
+ def Add(self, file_desc_proto):
168
+ """Adds the FileDescriptorProto and its types to this pool.
169
+
170
+ Args:
171
+ file_desc_proto (FileDescriptorProto): The file descriptor to add.
172
+ """
173
+
174
+ self._internal_db.Add(file_desc_proto)
175
+
176
+ def AddSerializedFile(self, serialized_file_desc_proto):
177
+ """Adds the FileDescriptorProto and its types to this pool.
178
+
179
+ Args:
180
+ serialized_file_desc_proto (bytes): A bytes string, serialization of the
181
+ :class:`FileDescriptorProto` to add.
182
+
183
+ Returns:
184
+ FileDescriptor: Descriptor for the added file.
185
+ """
186
+
187
+ # pylint: disable=g-import-not-at-top
188
+ from google.protobuf import descriptor_pb2
189
+ file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
190
+ serialized_file_desc_proto)
191
+ file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
192
+ file_desc.serialized_pb = serialized_file_desc_proto
193
+ return file_desc
194
+
195
+ # Never call this method. It is for internal usage only.
196
+ def _AddDescriptor(self, desc):
197
+ """Adds a Descriptor to the pool, non-recursively.
198
+
199
+ If the Descriptor contains nested messages or enums, the caller must
200
+ explicitly register them. This method also registers the FileDescriptor
201
+ associated with the message.
202
+
203
+ Args:
204
+ desc: A Descriptor.
205
+ """
206
+ if not isinstance(desc, descriptor.Descriptor):
207
+ raise TypeError('Expected instance of descriptor.Descriptor.')
208
+
209
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
210
+
211
+ self._descriptors[desc.full_name] = desc
212
+ self._AddFileDescriptor(desc.file)
213
+
214
  # Never call this method. It is for internal usage only.
  def _AddEnumDescriptor(self, enum_desc):
    """Adds an EnumDescriptor to the pool.

    This method also registers the FileDescriptor associated with the enum,
    and indexes the values of top-level enums under the enclosing package.

    Args:
      enum_desc: An EnumDescriptor.

    Raises:
      TypeError: if enum_desc is not a descriptor.EnumDescriptor.
    """

    if not isinstance(enum_desc, descriptor.EnumDescriptor):
      raise TypeError('Expected instance of descriptor.EnumDescriptor.')

    file_name = enum_desc.file.name
    self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
    self._enum_descriptors[enum_desc.full_name] = enum_desc

    # Top enum values need to be indexed.
    # Count the number of dots to see whether the enum is toplevel or nested
    # in a message. We cannot use enum_desc.containing_type at this stage.
    if enum_desc.file.package:
      top_level = (enum_desc.full_name.count('.')
                   - enum_desc.file.package.count('.') == 1)
    else:
      top_level = enum_desc.full_name.count('.') == 0
    if top_level:
      file_name = enum_desc.file.name
      package = enum_desc.file.package
      # Proto scoping: values of a top-level enum live directly in the
      # enclosing package namespace (siblings of the enum type itself).
      for enum_value in enum_desc.values:
        full_name = _NormalizeFullyQualifiedName(
            '.'.join((package, enum_value.name)))
        self._CheckConflictRegister(enum_value, full_name, file_name)
        self._top_enum_values[full_name] = enum_value
    self._AddFileDescriptor(enum_desc.file)
248
+
249
+ # Never call this method. It is for internal usage only.
250
+ def _AddServiceDescriptor(self, service_desc):
251
+ """Adds a ServiceDescriptor to the pool.
252
+
253
+ Args:
254
+ service_desc: A ServiceDescriptor.
255
+ """
256
+
257
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
258
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
259
+
260
+ self._CheckConflictRegister(service_desc, service_desc.full_name,
261
+ service_desc.file.name)
262
+ self._service_descriptors[service_desc.full_name] = service_desc
263
+
264
  # Never call this method. It is for internal usage only.
  def _AddExtensionDescriptor(self, extension):
    """Adds a FieldDescriptor describing an extension to the pool.

    Args:
      extension: A FieldDescriptor.

    Raises:
      AssertionError: when another extension with the same number extends the
        same message.
      TypeError: when the specified extension is not a
        descriptor.FieldDescriptor.
    """
    if not (isinstance(extension, descriptor.FieldDescriptor) and
            extension.is_extension):
      raise TypeError('Expected an extension descriptor.')

    # Only file-scope extensions are indexed by full name here; extensions
    # nested in a message are reachable through their scope message.
    if extension.extension_scope is None:
      self._CheckConflictRegister(
          extension, extension.full_name, extension.file.name)
      self._toplevel_extensions[extension.full_name] = extension

    try:
      existing_desc = self._extensions_by_number[
          extension.containing_type][extension.number]
    except KeyError:
      pass
    else:
      # Re-adding the *same* descriptor object is idempotent and allowed;
      # a different descriptor claiming the same number is a real conflict.
      if extension is not existing_desc:
        raise AssertionError(
            'Extensions "%s" and "%s" both try to extend message type "%s" '
            'with field number %d.' %
            (extension.full_name, existing_desc.full_name,
             extension.containing_type.full_name, extension.number))

    self._extensions_by_number[extension.containing_type][
        extension.number] = extension
    self._extensions_by_name[extension.containing_type][
        extension.full_name] = extension

    # Also register MessageSet extensions with the type name.
    if _IsMessageSetExtension(extension):
      self._extensions_by_name[extension.containing_type][
          extension.message_type.full_name] = extension

    # If a generated class already exists for the extended message, wire the
    # new extension's field helpers into it immediately.
    if hasattr(extension.containing_type, '_concrete_class'):
      python_message._AttachFieldHelpers(
          extension.containing_type._concrete_class, extension)
312
+
313
+ # Never call this method. It is for internal usage only.
314
+ def _InternalAddFileDescriptor(self, file_desc):
315
+ """Adds a FileDescriptor to the pool, non-recursively.
316
+
317
+ If the FileDescriptor contains messages or enums, the caller must explicitly
318
+ register them.
319
+
320
+ Args:
321
+ file_desc: A FileDescriptor.
322
+ """
323
+
324
+ self._AddFileDescriptor(file_desc)
325
+
326
+ def _AddFileDescriptor(self, file_desc):
327
+ """Adds a FileDescriptor to the pool, non-recursively.
328
+
329
+ If the FileDescriptor contains messages or enums, the caller must explicitly
330
+ register them.
331
+
332
+ Args:
333
+ file_desc: A FileDescriptor.
334
+ """
335
+
336
+ if not isinstance(file_desc, descriptor.FileDescriptor):
337
+ raise TypeError('Expected instance of descriptor.FileDescriptor.')
338
+ self._file_descriptors[file_desc.name] = file_desc
339
+
340
+ def FindFileByName(self, file_name):
341
+ """Gets a FileDescriptor by file name.
342
+
343
+ Args:
344
+ file_name (str): The path to the file to get a descriptor for.
345
+
346
+ Returns:
347
+ FileDescriptor: The descriptor for the named file.
348
+
349
+ Raises:
350
+ KeyError: if the file cannot be found in the pool.
351
+ """
352
+
353
+ try:
354
+ return self._file_descriptors[file_name]
355
+ except KeyError:
356
+ pass
357
+
358
+ try:
359
+ file_proto = self._internal_db.FindFileByName(file_name)
360
+ except KeyError as error:
361
+ if self._descriptor_db:
362
+ file_proto = self._descriptor_db.FindFileByName(file_name)
363
+ else:
364
+ raise error
365
+ if not file_proto:
366
+ raise KeyError('Cannot find a file named %s' % file_name)
367
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
368
+
369
  def FindFileContainingSymbol(self, symbol):
    """Gets the FileDescriptor for the file containing the specified symbol.

    Args:
      symbol (str): The name of the symbol to search for.

    Returns:
      FileDescriptor: Descriptor for the file that contains the specified
      symbol.

    Raises:
      KeyError: if the file cannot be found in the pool.
    """

    symbol = _NormalizeFullyQualifiedName(symbol)
    # Phase 1: look only among descriptors that are already built.
    try:
      return self._InternalFindFileContainingSymbol(symbol)
    except KeyError:
      pass

    try:
      # Try fallback database. Build and find again if possible.
      self._FindFileContainingSymbolInDb(symbol)
      return self._InternalFindFileContainingSymbol(symbol)
    except KeyError:
      raise KeyError('Cannot find a file containing %s' % symbol)
395
+
396
  def _InternalFindFileContainingSymbol(self, symbol):
    """Gets the already built FileDescriptor containing the specified symbol.

    Only consults the in-memory registries; never falls back to a descriptor
    database. Registries are probed in a fixed order: messages, enums,
    services, top-level enum values, top-level extensions, and finally
    members nested inside a message.

    Args:
      symbol (str): The name of the symbol to search for.

    Returns:
      FileDescriptor: Descriptor for the file that contains the specified
      symbol.

    Raises:
      KeyError: if the file cannot be found in the pool.
    """
    try:
      return self._descriptors[symbol].file
    except KeyError:
      pass

    try:
      return self._enum_descriptors[symbol].file
    except KeyError:
      pass

    try:
      return self._service_descriptors[symbol].file
    except KeyError:
      pass

    try:
      # Enum values have no direct .file; reach it through their enum type.
      return self._top_enum_values[symbol].type.file
    except KeyError:
      pass

    try:
      return self._toplevel_extensions[symbol].file
    except KeyError:
      pass

    # Try fields, enum values and nested extensions inside a message.
    top_name, _, sub_name = symbol.rpartition('.')
    try:
      message = self.FindMessageTypeByName(top_name)
      # The prefix resolving to a message is not enough: the last component
      # must actually be one of its members.
      assert (sub_name in message.extensions_by_name or
              sub_name in message.fields_by_name or
              sub_name in message.enum_values_by_name)
      return message.file
    except (KeyError, AssertionError):
      raise KeyError('Cannot find a file containing %s' % symbol)
444
+
445
+ def FindMessageTypeByName(self, full_name):
446
+ """Loads the named descriptor from the pool.
447
+
448
+ Args:
449
+ full_name (str): The full name of the descriptor to load.
450
+
451
+ Returns:
452
+ Descriptor: The descriptor for the named type.
453
+
454
+ Raises:
455
+ KeyError: if the message cannot be found in the pool.
456
+ """
457
+
458
+ full_name = _NormalizeFullyQualifiedName(full_name)
459
+ if full_name not in self._descriptors:
460
+ self._FindFileContainingSymbolInDb(full_name)
461
+ return self._descriptors[full_name]
462
+
463
+ def FindEnumTypeByName(self, full_name):
464
+ """Loads the named enum descriptor from the pool.
465
+
466
+ Args:
467
+ full_name (str): The full name of the enum descriptor to load.
468
+
469
+ Returns:
470
+ EnumDescriptor: The enum descriptor for the named type.
471
+
472
+ Raises:
473
+ KeyError: if the enum cannot be found in the pool.
474
+ """
475
+
476
+ full_name = _NormalizeFullyQualifiedName(full_name)
477
+ if full_name not in self._enum_descriptors:
478
+ self._FindFileContainingSymbolInDb(full_name)
479
+ return self._enum_descriptors[full_name]
480
+
481
+ def FindFieldByName(self, full_name):
482
+ """Loads the named field descriptor from the pool.
483
+
484
+ Args:
485
+ full_name (str): The full name of the field descriptor to load.
486
+
487
+ Returns:
488
+ FieldDescriptor: The field descriptor for the named field.
489
+
490
+ Raises:
491
+ KeyError: if the field cannot be found in the pool.
492
+ """
493
+ full_name = _NormalizeFullyQualifiedName(full_name)
494
+ message_name, _, field_name = full_name.rpartition('.')
495
+ message_descriptor = self.FindMessageTypeByName(message_name)
496
+ return message_descriptor.fields_by_name[field_name]
497
+
498
+ def FindOneofByName(self, full_name):
499
+ """Loads the named oneof descriptor from the pool.
500
+
501
+ Args:
502
+ full_name (str): The full name of the oneof descriptor to load.
503
+
504
+ Returns:
505
+ OneofDescriptor: The oneof descriptor for the named oneof.
506
+
507
+ Raises:
508
+ KeyError: if the oneof cannot be found in the pool.
509
+ """
510
+ full_name = _NormalizeFullyQualifiedName(full_name)
511
+ message_name, _, oneof_name = full_name.rpartition('.')
512
+ message_descriptor = self.FindMessageTypeByName(message_name)
513
+ return message_descriptor.oneofs_by_name[oneof_name]
514
+
515
  def FindExtensionByName(self, full_name):
    """Loads the named extension descriptor from the pool.

    Args:
      full_name (str): The full name of the extension descriptor to load.

    Returns:
      FieldDescriptor: The field descriptor for the named extension.

    Raises:
      KeyError: if the extension cannot be found in the pool.
    """
    full_name = _NormalizeFullyQualifiedName(full_name)
    try:
      # The proto compiler does not give any link between the FileDescriptor
      # and top-level extensions unless the FileDescriptorProto is added to
      # the DescriptorDatabase, but this can impact memory usage.
      # So we registered these extensions by name explicitly.
      return self._toplevel_extensions[full_name]
    except KeyError:
      pass
    message_name, _, extension_name = full_name.rpartition('.')
    try:
      # Most extensions are nested inside a message.
      scope = self.FindMessageTypeByName(message_name)
    except KeyError:
      # Some extensions are defined at file scope.
      scope = self._FindFileContainingSymbolInDb(full_name)
    # `scope` is either a Descriptor or a FileDescriptor; both expose
    # extensions_by_name keyed by the short (unqualified) name.
    return scope.extensions_by_name[extension_name]
544
+
545
+ def FindExtensionByNumber(self, message_descriptor, number):
546
+ """Gets the extension of the specified message with the specified number.
547
+
548
+ Extensions have to be registered to this pool by calling :func:`Add` or
549
+ :func:`AddExtensionDescriptor`.
550
+
551
+ Args:
552
+ message_descriptor (Descriptor): descriptor of the extended message.
553
+ number (int): Number of the extension field.
554
+
555
+ Returns:
556
+ FieldDescriptor: The descriptor for the extension.
557
+
558
+ Raises:
559
+ KeyError: when no extension with the given number is known for the
560
+ specified message.
561
+ """
562
+ try:
563
+ return self._extensions_by_number[message_descriptor][number]
564
+ except KeyError:
565
+ self._TryLoadExtensionFromDB(message_descriptor, number)
566
+ return self._extensions_by_number[message_descriptor][number]
567
+
568
+ def FindAllExtensions(self, message_descriptor):
569
+ """Gets all the known extensions of a given message.
570
+
571
+ Extensions have to be registered to this pool by build related
572
+ :func:`Add` or :func:`AddExtensionDescriptor`.
573
+
574
+ Args:
575
+ message_descriptor (Descriptor): Descriptor of the extended message.
576
+
577
+ Returns:
578
+ list[FieldDescriptor]: Field descriptors describing the extensions.
579
+ """
580
+ # Fallback to descriptor db if FindAllExtensionNumbers is provided.
581
+ if self._descriptor_db and hasattr(
582
+ self._descriptor_db, 'FindAllExtensionNumbers'):
583
+ full_name = message_descriptor.full_name
584
+ try:
585
+ all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
586
+ except:
587
+ pass
588
+ else:
589
+ if isinstance(all_numbers, list):
590
+ for number in all_numbers:
591
+ if number in self._extensions_by_number[message_descriptor]:
592
+ continue
593
+ self._TryLoadExtensionFromDB(message_descriptor, number)
594
+ else:
595
+ warnings.warn(
596
+ 'FindAllExtensionNumbers() on fall back DB must return a list,'
597
+ ' not {0}'.format(type(all_numbers))
598
+ )
599
+
600
+ return list(self._extensions_by_number[message_descriptor].values())
601
+
602
+ def _TryLoadExtensionFromDB(self, message_descriptor, number):
603
+ """Try to Load extensions from descriptor db.
604
+
605
+ Args:
606
+ message_descriptor: descriptor of the extended message.
607
+ number: the extension number that needs to be loaded.
608
+ """
609
+ if not self._descriptor_db:
610
+ return
611
+ # Only supported when FindFileContainingExtension is provided.
612
+ if not hasattr(
613
+ self._descriptor_db, 'FindFileContainingExtension'):
614
+ return
615
+
616
+ full_name = message_descriptor.full_name
617
+ file_proto = None
618
+ try:
619
+ file_proto = self._descriptor_db.FindFileContainingExtension(
620
+ full_name, number
621
+ )
622
+ except:
623
+ return
624
+
625
+ if file_proto is None:
626
+ return
627
+
628
+ try:
629
+ self._ConvertFileProtoToFileDescriptor(file_proto)
630
+ except:
631
+ warn_msg = ('Unable to load proto file %s for extension number %d.' %
632
+ (file_proto.name, number))
633
+ warnings.warn(warn_msg, RuntimeWarning)
634
+
635
+ def FindServiceByName(self, full_name):
636
+ """Loads the named service descriptor from the pool.
637
+
638
+ Args:
639
+ full_name (str): The full name of the service descriptor to load.
640
+
641
+ Returns:
642
+ ServiceDescriptor: The service descriptor for the named service.
643
+
644
+ Raises:
645
+ KeyError: if the service cannot be found in the pool.
646
+ """
647
+ full_name = _NormalizeFullyQualifiedName(full_name)
648
+ if full_name not in self._service_descriptors:
649
+ self._FindFileContainingSymbolInDb(full_name)
650
+ return self._service_descriptors[full_name]
651
+
652
+ def FindMethodByName(self, full_name):
653
+ """Loads the named service method descriptor from the pool.
654
+
655
+ Args:
656
+ full_name (str): The full name of the method descriptor to load.
657
+
658
+ Returns:
659
+ MethodDescriptor: The method descriptor for the service method.
660
+
661
+ Raises:
662
+ KeyError: if the method cannot be found in the pool.
663
+ """
664
+ full_name = _NormalizeFullyQualifiedName(full_name)
665
+ service_name, _, method_name = full_name.rpartition('.')
666
+ service_descriptor = self.FindServiceByName(service_name)
667
+ return service_descriptor.methods_by_name[method_name]
668
+
669
  def SetFeatureSetDefaults(self, defaults):
    """Sets the default feature mappings used during the build.

    Must be called before the pool builds any file; the defaults are frozen
    as soon as building starts (see _CreateDefaultFeatures).

    Args:
      defaults: a FeatureSetDefaults message containing the new mappings.

    Raises:
      ValueError: if the pool already started building, the edition range is
        inverted, or the per-edition defaults are not strictly increasing.
      TypeError: if defaults is not a FeatureSetDefaults message.
    """
    if self._edition_defaults is not None:
      raise ValueError(
          "Feature set defaults can't be changed once the pool has started"
          ' building!'
      )

    # pylint: disable=g-import-not-at-top
    from google.protobuf import descriptor_pb2

    if not isinstance(defaults, descriptor_pb2.FeatureSetDefaults):
      raise TypeError('SetFeatureSetDefaults called with invalid type')

    if defaults.minimum_edition > defaults.maximum_edition:
      raise ValueError(
          'Invalid edition range %s to %s'
          % (
              descriptor_pb2.Edition.Name(defaults.minimum_edition),
              descriptor_pb2.Edition.Name(defaults.maximum_edition),
          )
      )

    # The defaults list must be sorted strictly by edition so that lookup
    # (last entry <= requested edition) is well defined.
    prev_edition = descriptor_pb2.Edition.EDITION_UNKNOWN
    for d in defaults.defaults:
      if d.edition == descriptor_pb2.Edition.EDITION_UNKNOWN:
        raise ValueError('Invalid edition EDITION_UNKNOWN specified')
      if prev_edition >= d.edition:
        raise ValueError(
            'Feature set defaults are not strictly increasing. %s is greater'
            ' than or equal to %s'
            % (
                descriptor_pb2.Edition.Name(prev_edition),
                descriptor_pb2.Edition.Name(d.edition),
            )
        )
      prev_edition = d.edition
    self._edition_defaults = defaults
711
+
712
  def _CreateDefaultFeatures(self, edition):
    """Creates a FeatureSet message with defaults for a specific edition.

    Args:
      edition: the edition to generate defaults for.

    Returns:
      A FeatureSet message with defaults for a specific edition.

    Raises:
      TypeError: if the edition falls outside the supported range or no
        default entry applies to it.
    """
    # pylint: disable=g-import-not-at-top
    from google.protobuf import descriptor_pb2

    # The lock guards lazy parsing of the compiled-in serialized defaults;
    # after this point self._edition_defaults is frozen.
    with _edition_defaults_lock:
      if not self._edition_defaults:
        self._edition_defaults = descriptor_pb2.FeatureSetDefaults()
        self._edition_defaults.ParseFromString(
            self._serialized_edition_defaults
        )

    if edition < self._edition_defaults.minimum_edition:
      raise TypeError(
          'Edition %s is earlier than the minimum supported edition %s!'
          % (
              descriptor_pb2.Edition.Name(edition),
              descriptor_pb2.Edition.Name(
                  self._edition_defaults.minimum_edition
              ),
          )
      )
    if edition > self._edition_defaults.maximum_edition:
      raise TypeError(
          'Edition %s is later than the maximum supported edition %s!'
          % (
              descriptor_pb2.Edition.Name(edition),
              descriptor_pb2.Edition.Name(
                  self._edition_defaults.maximum_edition
              ),
          )
      )
    # Defaults are sorted by edition (enforced in SetFeatureSetDefaults);
    # pick the last entry whose edition does not exceed the requested one.
    found = None
    for d in self._edition_defaults.defaults:
      if d.edition > edition:
        break
      found = d
    if found is None:
      raise TypeError(
          'No valid default found for edition %s!'
          % descriptor_pb2.Edition.Name(edition)
      )

    # Fixed features are copied first, then overridable ones merged on top.
    defaults = descriptor_pb2.FeatureSet()
    defaults.CopyFrom(found.fixed_features)
    defaults.MergeFrom(found.overridable_features)
    return defaults
766
+
767
+ def _InternFeatures(self, features):
768
+ serialized = features.SerializeToString()
769
+ with _edition_defaults_lock:
770
+ cached = self._feature_cache.get(serialized)
771
+ if cached is None:
772
+ self._feature_cache[serialized] = features
773
+ cached = features
774
+ return cached
775
+
776
+ def _FindFileContainingSymbolInDb(self, symbol):
777
+ """Finds the file in descriptor DB containing the specified symbol.
778
+
779
+ Args:
780
+ symbol (str): The name of the symbol to search for.
781
+
782
+ Returns:
783
+ FileDescriptor: The file that contains the specified symbol.
784
+
785
+ Raises:
786
+ KeyError: if the file cannot be found in the descriptor database.
787
+ """
788
+ try:
789
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
790
+ except KeyError as error:
791
+ if self._descriptor_db:
792
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
793
+ else:
794
+ raise error
795
+ if not file_proto:
796
+ raise KeyError('Cannot find a file containing %s' % symbol)
797
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
798
+
799
  def _ConvertFileProtoToFileDescriptor(self, file_proto):
    """Creates a FileDescriptor from a proto or returns a cached copy.

    This method also has the side effect of loading all the symbols found in
    the file into the appropriate dictionaries in the pool.

    Args:
      file_proto: The proto to convert.

    Returns:
      A FileDescriptor matching the passed in proto.
    """
    if file_proto.name not in self._file_descriptors:
      built_deps = list(self._GetDeps(file_proto.dependency))
      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
      public_deps = [direct_deps[i] for i in file_proto.public_dependency]

      # pylint: disable=g-import-not-at-top
      from google.protobuf import descriptor_pb2

      file_descriptor = descriptor.FileDescriptor(
          pool=self,
          name=file_proto.name,
          package=file_proto.package,
          syntax=file_proto.syntax,
          edition=descriptor_pb2.Edition.Name(file_proto.edition),
          options=_OptionsOrNone(file_proto),
          serialized_pb=file_proto.SerializeToString(),
          dependencies=direct_deps,
          public_dependencies=public_deps,
          # pylint: disable=protected-access
          create_key=descriptor._internal_create_key,
      )
      scope = {}

      # This loop extracts all the message and enum types from all the
      # dependencies of the file_proto. This is necessary to create the
      # scope of available message types when defining the passed in
      # file proto.
      for dependency in built_deps:
        scope.update(self._ExtractSymbols(
            dependency.message_types_by_name.values()))
        scope.update((_PrefixWithDot(enum.full_name), enum)
                     for enum in dependency.enum_types_by_name.values())

      for message_type in file_proto.message_type:
        message_desc = self._ConvertMessageDescriptor(
            message_type, file_proto.package, file_descriptor, scope,
            file_proto.syntax)
        file_descriptor.message_types_by_name[message_desc.name] = (
            message_desc)

      for enum_type in file_proto.enum_type:
        file_descriptor.enum_types_by_name[enum_type.name] = (
            self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                        file_descriptor, None, scope, True))

      for index, extension_proto in enumerate(file_proto.extension):
        extension_desc = self._MakeFieldDescriptor(
            extension_proto, file_proto.package, index, file_descriptor,
            is_extension=True)
        extension_desc.containing_type = self._GetTypeFromScope(
            file_descriptor.package, extension_proto.extendee, scope)
        self._SetFieldType(extension_proto, extension_desc,
                           file_descriptor.package, scope)
        file_descriptor.extensions_by_name[extension_desc.name] = (
            extension_desc)

      # Field types are resolved in a second pass, after every message in
      # the file exists in `scope` — fields may reference types declared
      # later in the file.
      for desc_proto in file_proto.message_type:
        self._SetAllFieldTypes(file_proto.package, desc_proto, scope)

      if file_proto.package:
        desc_proto_prefix = _PrefixWithDot(file_proto.package)
      else:
        desc_proto_prefix = ''

      for desc_proto in file_proto.message_type:
        desc = self._GetTypeFromScope(
            desc_proto_prefix, desc_proto.name, scope)
        file_descriptor.message_types_by_name[desc_proto.name] = desc

      for index, service_proto in enumerate(file_proto.service):
        file_descriptor.services_by_name[service_proto.name] = (
            self._MakeServiceDescriptor(service_proto, index, scope,
                                        file_proto.package, file_descriptor))

      self._file_descriptors[file_proto.name] = file_descriptor

    # Add extensions to the pool
    # NOTE: runs on the cache-hit path too; registration is idempotent for
    # identical descriptor objects (see _AddExtensionDescriptor).
    def AddExtensionForNested(message_type):
      for nested in message_type.nested_types:
        AddExtensionForNested(nested)
      for extension in message_type.extensions:
        self._AddExtensionDescriptor(extension)

    file_desc = self._file_descriptors[file_proto.name]
    for extension in file_desc.extensions_by_name.values():
      self._AddExtensionDescriptor(extension)
    for message_type in file_desc.message_types_by_name.values():
      AddExtensionForNested(message_type)

    return file_desc
901
+
902
  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None, syntax=None):
    """Adds the proto to the pool in the specified package.

    Recursively converts nested messages and enums first, so that the
    resulting Descriptor is fully populated when registered.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.
      syntax: string indicating syntax of the file ("proto2" or "proto3")

    Returns:
      The added descriptor.
    """

    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    # Children first: nested messages/enums register themselves into `scope`
    # so this message's fields can reference them.
    nested = [
        self._ConvertMessageDescriptor(
            nested, desc_name, file_desc, scope, syntax)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
                                    scope, False)
        for enum in desc_proto.enum_type]
    fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
              for index, field in enumerate(desc_proto.field)]
    extensions = [
        self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
                                  is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    oneofs = [
        # pylint: disable=g-complex-comprehension
        descriptor.OneofDescriptor(
            desc.name,
            '.'.join((desc_name, desc.name)),
            index,
            None,
            [],
            _OptionsOrNone(desc),
            # pylint: disable=protected-access
            create_key=descriptor._internal_create_key)
        for index, desc in enumerate(desc_proto.oneof_decl)
    ]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        oneofs=oneofs,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=_OptionsOrNone(desc_proto),
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None,
        is_map_entry=desc_proto.options.map_entry,
        # pylint: disable=protected-access
        create_key=descriptor._internal_create_key,
    )
    # Back-link children to their new parent.
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    # Wire each oneof member field to its oneof, in both directions.
    for field_index, field_desc in enumerate(desc_proto.field):
      if field_desc.HasField('oneof_index'):
        oneof_index = field_desc.oneof_index
        oneofs[oneof_index].fields.append(fields[field_index])
        fields[field_index].containing_oneof = oneofs[oneof_index]

    scope[_PrefixWithDot(desc_name)] = desc
    self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
    self._descriptors[desc_name] = desc
    return desc
996
+
997
  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                             containing_type=None, scope=None, top_level=False):
    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

    Args:
      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
      package: Optional package name for the new message EnumDescriptor.
      file_desc: The file containing the enum descriptor.
      containing_type: The type containing this enum.
      scope: Scope containing available types.
      top_level: If True, the enum is a top level symbol. If False, the enum
          is defined inside a message.

    Returns:
      The added descriptor
    """

    if package:
      enum_name = '.'.join((package, enum_proto.name))
    else:
      enum_name = enum_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    values = [self._MakeEnumValueDescriptor(value, index)
              for index, value in enumerate(enum_proto.value)]
    desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                     full_name=enum_name,
                                     filename=file_name,
                                     file=file_desc,
                                     values=values,
                                     containing_type=containing_type,
                                     options=_OptionsOrNone(enum_proto),
                                     # pylint: disable=protected-access
                                     create_key=descriptor._internal_create_key)
    scope['.%s' % enum_name] = desc
    self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
    self._enum_descriptors[enum_name] = desc

    # Add top level enum values.
    # Per proto scoping rules they live directly in the package namespace,
    # as siblings of the enum type.
    if top_level:
      for value in values:
        full_name = _NormalizeFullyQualifiedName(
            '.'.join((package, value.name)))
        self._CheckConflictRegister(value, full_name, file_name)
        self._top_enum_values[full_name] = value

    return desc
1048
+
1049
+ def _MakeFieldDescriptor(self, field_proto, message_name, index,
1050
+ file_desc, is_extension=False):
1051
+ """Creates a field descriptor from a FieldDescriptorProto.
1052
+
1053
+ For message and enum type fields, this method will do a look up
1054
+ in the pool for the appropriate descriptor for that type. If it
1055
+ is unavailable, it will fall back to the _source function to
1056
+ create it. If this type is still unavailable, construction will
1057
+ fail.
1058
+
1059
+ Args:
1060
+ field_proto: The proto describing the field.
1061
+ message_name: The name of the containing message.
1062
+ index: Index of the field
1063
+ file_desc: The file containing the field descriptor.
1064
+ is_extension: Indication that this field is for an extension.
1065
+
1066
+ Returns:
1067
+ An initialized FieldDescriptor object
1068
+ """
1069
+
1070
+ if message_name:
1071
+ full_name = '.'.join((message_name, field_proto.name))
1072
+ else:
1073
+ full_name = field_proto.name
1074
+
1075
+ if field_proto.json_name:
1076
+ json_name = field_proto.json_name
1077
+ else:
1078
+ json_name = None
1079
+
1080
+ return descriptor.FieldDescriptor(
1081
+ name=field_proto.name,
1082
+ full_name=full_name,
1083
+ index=index,
1084
+ number=field_proto.number,
1085
+ type=field_proto.type,
1086
+ cpp_type=None,
1087
+ message_type=None,
1088
+ enum_type=None,
1089
+ containing_type=None,
1090
+ label=field_proto.label,
1091
+ has_default_value=False,
1092
+ default_value=None,
1093
+ is_extension=is_extension,
1094
+ extension_scope=None,
1095
+ options=_OptionsOrNone(field_proto),
1096
+ json_name=json_name,
1097
+ file=file_desc,
1098
+ # pylint: disable=protected-access
1099
+ create_key=descriptor._internal_create_key)
1100
+
1101
+ def _SetAllFieldTypes(self, package, desc_proto, scope):
1102
+ """Sets all the descriptor's fields's types.
1103
+
1104
+ This method also sets the containing types on any extensions.
1105
+
1106
+ Args:
1107
+ package: The current package of desc_proto.
1108
+ desc_proto: The message descriptor to update.
1109
+ scope: Enclosing scope of available types.
1110
+ """
1111
+
1112
+ package = _PrefixWithDot(package)
1113
+
1114
+ main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
1115
+
1116
+ if package == '.':
1117
+ nested_package = _PrefixWithDot(desc_proto.name)
1118
+ else:
1119
+ nested_package = '.'.join([package, desc_proto.name])
1120
+
1121
+ for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
1122
+ self._SetFieldType(field_proto, field_desc, nested_package, scope)
1123
+
1124
+ for extension_proto, extension_desc in (
1125
+ zip(desc_proto.extension, main_desc.extensions)):
1126
+ extension_desc.containing_type = self._GetTypeFromScope(
1127
+ nested_package, extension_proto.extendee, scope)
1128
+ self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
1129
+
1130
+ for nested_type in desc_proto.nested_type:
1131
+ self._SetAllFieldTypes(nested_package, nested_type, scope)
1132
+
1133
+ def _SetFieldType(self, field_proto, field_desc, package, scope):
1134
+ """Sets the field's type, cpp_type, message_type and enum_type.
1135
+
1136
+ Args:
1137
+ field_proto: Data about the field in proto format.
1138
+ field_desc: The descriptor to modify.
1139
+ package: The package the field's container is in.
1140
+ scope: Enclosing scope of available types.
1141
+ """
1142
+ if field_proto.type_name:
1143
+ desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
1144
+ else:
1145
+ desc = None
1146
+
1147
+ if not field_proto.HasField('type'):
1148
+ if isinstance(desc, descriptor.Descriptor):
1149
+ field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
1150
+ else:
1151
+ field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
1152
+
1153
+ field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
1154
+ field_proto.type)
1155
+
1156
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
1157
+ or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
1158
+ field_desc.message_type = desc
1159
+
1160
+ if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1161
+ field_desc.enum_type = desc
1162
+
1163
+ if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
1164
+ field_desc.has_default_value = False
1165
+ field_desc.default_value = []
1166
+ elif field_proto.HasField('default_value'):
1167
+ field_desc.has_default_value = True
1168
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1169
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1170
+ field_desc.default_value = float(field_proto.default_value)
1171
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1172
+ field_desc.default_value = field_proto.default_value
1173
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1174
+ field_desc.default_value = field_proto.default_value.lower() == 'true'
1175
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1176
+ field_desc.default_value = field_desc.enum_type.values_by_name[
1177
+ field_proto.default_value].number
1178
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1179
+ field_desc.default_value = text_encoding.CUnescape(
1180
+ field_proto.default_value)
1181
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1182
+ field_desc.default_value = None
1183
+ else:
1184
+ # All other types are of the "int" type.
1185
+ field_desc.default_value = int(field_proto.default_value)
1186
+ else:
1187
+ field_desc.has_default_value = False
1188
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1189
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1190
+ field_desc.default_value = 0.0
1191
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1192
+ field_desc.default_value = u''
1193
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1194
+ field_desc.default_value = False
1195
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1196
+ field_desc.default_value = field_desc.enum_type.values[0].number
1197
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1198
+ field_desc.default_value = b''
1199
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1200
+ field_desc.default_value = None
1201
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
1202
+ field_desc.default_value = None
1203
+ else:
1204
+ # All other types are of the "int" type.
1205
+ field_desc.default_value = 0
1206
+
1207
+ field_desc.type = field_proto.type
1208
+
1209
+ def _MakeEnumValueDescriptor(self, value_proto, index):
1210
+ """Creates a enum value descriptor object from a enum value proto.
1211
+
1212
+ Args:
1213
+ value_proto: The proto describing the enum value.
1214
+ index: The index of the enum value.
1215
+
1216
+ Returns:
1217
+ An initialized EnumValueDescriptor object.
1218
+ """
1219
+
1220
+ return descriptor.EnumValueDescriptor(
1221
+ name=value_proto.name,
1222
+ index=index,
1223
+ number=value_proto.number,
1224
+ options=_OptionsOrNone(value_proto),
1225
+ type=None,
1226
+ # pylint: disable=protected-access
1227
+ create_key=descriptor._internal_create_key)
1228
+
1229
+ def _MakeServiceDescriptor(self, service_proto, service_index, scope,
1230
+ package, file_desc):
1231
+ """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
1232
+
1233
+ Args:
1234
+ service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
1235
+ service_index: The index of the service in the File.
1236
+ scope: Dict mapping short and full symbols to message and enum types.
1237
+ package: Optional package name for the new message EnumDescriptor.
1238
+ file_desc: The file containing the service descriptor.
1239
+
1240
+ Returns:
1241
+ The added descriptor.
1242
+ """
1243
+
1244
+ if package:
1245
+ service_name = '.'.join((package, service_proto.name))
1246
+ else:
1247
+ service_name = service_proto.name
1248
+
1249
+ methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
1250
+ scope, index)
1251
+ for index, method_proto in enumerate(service_proto.method)]
1252
+ desc = descriptor.ServiceDescriptor(
1253
+ name=service_proto.name,
1254
+ full_name=service_name,
1255
+ index=service_index,
1256
+ methods=methods,
1257
+ options=_OptionsOrNone(service_proto),
1258
+ file=file_desc,
1259
+ # pylint: disable=protected-access
1260
+ create_key=descriptor._internal_create_key)
1261
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
1262
+ self._service_descriptors[service_name] = desc
1263
+ return desc
1264
+
1265
+ def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
1266
+ index):
1267
+ """Creates a method descriptor from a MethodDescriptorProto.
1268
+
1269
+ Args:
1270
+ method_proto: The proto describing the method.
1271
+ service_name: The name of the containing service.
1272
+ package: Optional package name to look up for types.
1273
+ scope: Scope containing available types.
1274
+ index: Index of the method in the service.
1275
+
1276
+ Returns:
1277
+ An initialized MethodDescriptor object.
1278
+ """
1279
+ full_name = '.'.join((service_name, method_proto.name))
1280
+ input_type = self._GetTypeFromScope(
1281
+ package, method_proto.input_type, scope)
1282
+ output_type = self._GetTypeFromScope(
1283
+ package, method_proto.output_type, scope)
1284
+ return descriptor.MethodDescriptor(
1285
+ name=method_proto.name,
1286
+ full_name=full_name,
1287
+ index=index,
1288
+ containing_service=None,
1289
+ input_type=input_type,
1290
+ output_type=output_type,
1291
+ client_streaming=method_proto.client_streaming,
1292
+ server_streaming=method_proto.server_streaming,
1293
+ options=_OptionsOrNone(method_proto),
1294
+ # pylint: disable=protected-access
1295
+ create_key=descriptor._internal_create_key)
1296
+
1297
+ def _ExtractSymbols(self, descriptors):
1298
+ """Pulls out all the symbols from descriptor protos.
1299
+
1300
+ Args:
1301
+ descriptors: The messages to extract descriptors from.
1302
+ Yields:
1303
+ A two element tuple of the type name and descriptor object.
1304
+ """
1305
+
1306
+ for desc in descriptors:
1307
+ yield (_PrefixWithDot(desc.full_name), desc)
1308
+ for symbol in self._ExtractSymbols(desc.nested_types):
1309
+ yield symbol
1310
+ for enum in desc.enum_types:
1311
+ yield (_PrefixWithDot(enum.full_name), enum)
1312
+
1313
+ def _GetDeps(self, dependencies, visited=None):
1314
+ """Recursively finds dependencies for file protos.
1315
+
1316
+ Args:
1317
+ dependencies: The names of the files being depended on.
1318
+ visited: The names of files already found.
1319
+
1320
+ Yields:
1321
+ Each direct and indirect dependency.
1322
+ """
1323
+
1324
+ visited = visited or set()
1325
+ for dependency in dependencies:
1326
+ if dependency not in visited:
1327
+ visited.add(dependency)
1328
+ dep_desc = self.FindFileByName(dependency)
1329
+ yield dep_desc
1330
+ public_files = [d.name for d in dep_desc.public_dependencies]
1331
+ yield from self._GetDeps(public_files, visited)
1332
+
1333
+ def _GetTypeFromScope(self, package, type_name, scope):
1334
+ """Finds a given type name in the current scope.
1335
+
1336
+ Args:
1337
+ package: The package the proto should be located in.
1338
+ type_name: The name of the type to be found in the scope.
1339
+ scope: Dict mapping short and full symbols to message and enum types.
1340
+
1341
+ Returns:
1342
+ The descriptor for the requested type.
1343
+ """
1344
+ if type_name not in scope:
1345
+ components = _PrefixWithDot(package).split('.')
1346
+ while components:
1347
+ possible_match = '.'.join(components + [type_name])
1348
+ if possible_match in scope:
1349
+ type_name = possible_match
1350
+ break
1351
+ else:
1352
+ components.pop(-1)
1353
+ return scope[type_name]
1354
+
1355
+
1356
+ def _PrefixWithDot(name):
1357
+ return name if name.startswith('.') else '.%s' % name
1358
+
1359
+
1360
+ if _USE_C_DESCRIPTORS:
1361
+ # TODO: This pool could be constructed from Python code, when we
1362
+ # support a flag like 'use_cpp_generated_pool=True'.
1363
+ # pylint: disable=protected-access
1364
+ _DEFAULT = descriptor._message.default_pool
1365
+ else:
1366
+ _DEFAULT = DescriptorPool()
1367
+
1368
+
1369
+ def Default():
1370
+ return _DEFAULT
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/duration.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Duration helper APIs."""
9
+
10
+ import datetime
11
+
12
+ from google.protobuf.duration_pb2 import Duration
13
+
14
+
15
+ def from_json_string(value: str) -> Duration:
16
+ """Converts a string to Duration.
17
+
18
+ Args:
19
+ value: A string to be converted. The string must end with 's'. Any
20
+ fractional digits (or none) are accepted as long as they fit into
21
+ precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"
22
+
23
+ Raises:
24
+ ValueError: On parsing problems.
25
+ """
26
+ duration = Duration()
27
+ duration.FromJsonString(value)
28
+ return duration
29
+
30
+
31
+ def from_microseconds(micros: float) -> Duration:
32
+ """Converts microseconds to Duration."""
33
+ duration = Duration()
34
+ duration.FromMicroseconds(micros)
35
+ return duration
36
+
37
+
38
+ def from_milliseconds(millis: float) -> Duration:
39
+ """Converts milliseconds to Duration."""
40
+ duration = Duration()
41
+ duration.FromMilliseconds(millis)
42
+ return duration
43
+
44
+
45
+ def from_nanoseconds(nanos: float) -> Duration:
46
+ """Converts nanoseconds to Duration."""
47
+ duration = Duration()
48
+ duration.FromNanoseconds(nanos)
49
+ return duration
50
+
51
+
52
+ def from_seconds(seconds: float) -> Duration:
53
+ """Converts seconds to Duration."""
54
+ duration = Duration()
55
+ duration.FromSeconds(seconds)
56
+ return duration
57
+
58
+
59
+ def from_timedelta(td: datetime.timedelta) -> Duration:
60
+ """Converts timedelta to Duration."""
61
+ duration = Duration()
62
+ duration.FromTimedelta(td)
63
+ return duration
64
+
65
+
66
+ def to_json_string(duration: Duration) -> str:
67
+ """Converts Duration to string format.
68
+
69
+ Returns:
70
+ A string converted from self. The string format will contains
71
+ 3, 6, or 9 fractional digits depending on the precision required to
72
+ represent the exact Duration value. For example: "1s", "1.010s",
73
+ "1.000000100s", "-3.100s"
74
+ """
75
+ return duration.ToJsonString()
76
+
77
+
78
+ def to_microseconds(duration: Duration) -> int:
79
+ """Converts a Duration to microseconds."""
80
+ return duration.ToMicroseconds()
81
+
82
+
83
+ def to_milliseconds(duration: Duration) -> int:
84
+ """Converts a Duration to milliseconds."""
85
+ return duration.ToMilliseconds()
86
+
87
+
88
+ def to_nanoseconds(duration: Duration) -> int:
89
+ """Converts a Duration to nanoseconds."""
90
+ return duration.ToNanoseconds()
91
+
92
+
93
+ def to_seconds(duration: Duration) -> int:
94
+ """Converts a Duration to seconds."""
95
+ return duration.ToSeconds()
96
+
97
+
98
+ def to_timedelta(duration: Duration) -> datetime.timedelta:
99
+ """Converts Duration to timedelta."""
100
+ return duration.ToTimedelta()
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/duration_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/duration.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/duration.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+
26
+
27
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\":\n\x08\x44uration\x12\x18\n\x07seconds\x18\x01 \x01(\x03R\x07seconds\x12\x14\n\x05nanos\x18\x02 \x01(\x05R\x05nanosB\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
28
+
29
+ _globals = globals()
30
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', _globals)
32
+ if not _descriptor._USE_C_DESCRIPTORS:
33
+ _globals['DESCRIPTOR']._loaded_options = None
34
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
35
+ _globals['_DURATION']._serialized_start=51
36
+ _globals['_DURATION']._serialized_end=109
37
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/empty_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/empty.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/empty.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+
26
+
27
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
28
+
29
+ _globals = globals()
30
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', _globals)
32
+ if not _descriptor._USE_C_DESCRIPTORS:
33
+ _globals['DESCRIPTOR']._loaded_options = None
34
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
35
+ _globals['_EMPTY']._serialized_start=48
36
+ _globals['_EMPTY']._serialized_end=55
37
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/field_mask_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
4
+ # source: google/protobuf/field_mask.proto
5
+ # Protobuf Python Version: 6.32.0
6
+ """Generated protocol buffer code."""
7
+ from google.protobuf import descriptor as _descriptor
8
+ from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
10
+ from google.protobuf import symbol_database as _symbol_database
11
+ from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 6,
15
+ 32,
16
+ 0,
17
+ '',
18
+ 'google/protobuf/field_mask.proto'
19
+ )
20
+ # @@protoc_insertion_point(imports)
21
+
22
+ _sym_db = _symbol_database.Default()
23
+
24
+
25
+
26
+
27
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"!\n\tFieldMask\x12\x14\n\x05paths\x18\x01 \x03(\tR\x05pathsB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
28
+
29
+ _globals = globals()
30
+ _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
31
+ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', _globals)
32
+ if not _descriptor._USE_C_DESCRIPTORS:
33
+ _globals['DESCRIPTOR']._loaded_options = None
34
+ _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
35
+ _globals['_FIELDMASK']._serialized_start=53
36
+ _globals['_FIELDMASK']._serialized_end=86
37
+ # @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/api_implementation.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Determine which implementation of the protobuf API is used in this process.
9
+ """
10
+
11
+ import importlib
12
+ import os
13
+ import sys
14
+ import warnings
15
+
16
+ _GOOGLE3_PYTHON_UPB_DEFAULT = True
17
+
18
+
19
+ def _ApiVersionToImplementationType(api_version):
20
+ if api_version == 2:
21
+ return 'cpp'
22
+ if api_version == 1:
23
+ raise ValueError('api_version=1 is no longer supported.')
24
+ if api_version == 0:
25
+ return 'python'
26
+ return None
27
+
28
+
29
+ _implementation_type = None
30
+ try:
31
+ # pylint: disable=g-import-not-at-top
32
+ from google.protobuf.internal import _api_implementation
33
+ # The compile-time constants in the _api_implementation module can be used to
34
+ # switch to a certain implementation of the Python API at build time.
35
+ _implementation_type = _ApiVersionToImplementationType(
36
+ _api_implementation.api_version)
37
+ except ImportError:
38
+ pass # Unspecified by compiler flags.
39
+
40
+
41
+ def _CanImport(mod_name):
42
+ try:
43
+ mod = importlib.import_module(mod_name)
44
+ # Work around a known issue in the classic bootstrap .par import hook.
45
+ if not mod:
46
+ raise ImportError(mod_name + ' import succeeded but was None')
47
+ return True
48
+ except ImportError:
49
+ return False
50
+
51
+
52
+ if _implementation_type is None:
53
+ if _CanImport('google._upb._message'):
54
+ _implementation_type = 'upb'
55
+ elif _CanImport('google.protobuf.pyext._message'):
56
+ _implementation_type = 'cpp'
57
+ else:
58
+ _implementation_type = 'python'
59
+
60
+
61
+ # This environment variable can be used to switch to a certain implementation
62
+ # of the Python API, overriding the compile-time constants in the
63
+ # _api_implementation module. Right now only 'python', 'cpp' and 'upb' are
64
+ # valid values. Any other value will raise error.
65
+ _implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
66
+ _implementation_type)
67
+
68
+ if _implementation_type not in ('python', 'cpp', 'upb'):
69
+ raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not '
70
+ 'supported. Please set to \'python\', \'cpp\' or '
71
+ '\'upb\'.'.format(_implementation_type))
72
+
73
+ if 'PyPy' in sys.version and _implementation_type == 'cpp':
74
+ warnings.warn('PyPy does not work yet with cpp protocol buffers. '
75
+ 'Falling back to the python implementation.')
76
+ _implementation_type = 'python'
77
+
78
+ _c_module = None
79
+
80
+ if _implementation_type == 'cpp':
81
+ try:
82
+ # pylint: disable=g-import-not-at-top
83
+ from google.protobuf.pyext import _message
84
+ sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message
85
+ _c_module = _message
86
+ del _message
87
+ except ImportError:
88
+ # TODO: fail back to python
89
+ warnings.warn(
90
+ 'Selected implementation cpp is not available.')
91
+ pass
92
+
93
+ if _implementation_type == 'upb':
94
+ try:
95
+ # pylint: disable=g-import-not-at-top
96
+ from google._upb import _message
97
+ _c_module = _message
98
+ del _message
99
+ except ImportError:
100
+ warnings.warn('Selected implementation upb is not available. '
101
+ 'Falling back to the python implementation.')
102
+ _implementation_type = 'python'
103
+ pass
104
+
105
+ # Detect if serialization should be deterministic by default
106
+ try:
107
+ # The presence of this module in a build allows the proto implementation to
108
+ # be upgraded merely via build deps.
109
+ #
110
+ # NOTE: Merely importing this automatically enables deterministic proto
111
+ # serialization for C++ code, but we still need to export it as a boolean so
112
+ # that we can do the same for `_implementation_type == 'python'`.
113
+ #
114
+ # NOTE2: It is possible for C++ code to enable deterministic serialization by
115
+ # default _without_ affecting Python code, if the C++ implementation is not in
116
+ # use by this module. That is intended behavior, so we don't actually expose
117
+ # this boolean outside of this module.
118
+ #
119
+ # pylint: disable=g-import-not-at-top,unused-import
120
+ from google.protobuf import enable_deterministic_proto_serialization
121
+ _python_deterministic_proto_serialization = True
122
+ except ImportError:
123
+ _python_deterministic_proto_serialization = False
124
+
125
+
126
+ # Usage of this function is discouraged. Clients shouldn't care which
127
+ # implementation of the API is in use. Note that there is no guarantee
128
+ # that differences between APIs will be maintained.
129
+ # Please don't use this function if possible.
130
+ def Type():
131
+ return _implementation_type
132
+
133
+
134
+ # For internal use only
135
+ def IsPythonDefaultSerializationDeterministic():
136
+ return _python_deterministic_proto_serialization
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/builder.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Builds descriptors, message classes and services for generated _pb2.py.
9
+
10
+ This file is only called in python generated _pb2.py files. It builds
11
+ descriptors, message classes and services that users can directly use
12
+ in generated code.
13
+ """
14
+
15
+ __author__ = 'jieluo@google.com (Jie Luo)'
16
+
17
+ from google.protobuf.internal import enum_type_wrapper
18
+ from google.protobuf.internal import python_message
19
+ from google.protobuf import message as _message
20
+ from google.protobuf import reflection as _reflection
21
+ from google.protobuf import symbol_database as _symbol_database
22
+
23
+ _sym_db = _symbol_database.Default()
24
+
25
+
26
+ def BuildMessageAndEnumDescriptors(file_des, module):
27
+ """Builds message and enum descriptors.
28
+
29
+ Args:
30
+ file_des: FileDescriptor of the .proto file
31
+ module: Generated _pb2 module
32
+ """
33
+
34
+ def BuildNestedDescriptors(msg_des, prefix):
35
+ for (name, nested_msg) in msg_des.nested_types_by_name.items():
36
+ module_name = prefix + name.upper()
37
+ module[module_name] = nested_msg
38
+ BuildNestedDescriptors(nested_msg, module_name + '_')
39
+ for enum_des in msg_des.enum_types:
40
+ module[prefix + enum_des.name.upper()] = enum_des
41
+
42
+ for (name, msg_des) in file_des.message_types_by_name.items():
43
+ module_name = '_' + name.upper()
44
+ module[module_name] = msg_des
45
+ BuildNestedDescriptors(msg_des, module_name + '_')
46
+
47
+
48
+ def BuildTopDescriptorsAndMessages(file_des, module_name, module):
49
+ """Builds top level descriptors and message classes.
50
+
51
+ Args:
52
+ file_des: FileDescriptor of the .proto file
53
+ module_name: str, the name of generated _pb2 module
54
+ module: Generated _pb2 module
55
+ """
56
+
57
+ def BuildMessage(msg_des, prefix):
58
+ create_dict = {}
59
+ for (name, nested_msg) in msg_des.nested_types_by_name.items():
60
+ create_dict[name] = BuildMessage(nested_msg, prefix + msg_des.name + '.')
61
+ create_dict['DESCRIPTOR'] = msg_des
62
+ create_dict['__module__'] = module_name
63
+ create_dict['__qualname__'] = prefix + msg_des.name
64
+ message_class = _reflection.GeneratedProtocolMessageType(
65
+ msg_des.name, (_message.Message,), create_dict)
66
+ _sym_db.RegisterMessage(message_class)
67
+ return message_class
68
+
69
+ # top level enums
70
+ for (name, enum_des) in file_des.enum_types_by_name.items():
71
+ module['_' + name.upper()] = enum_des
72
+ module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
73
+ for enum_value in enum_des.values:
74
+ module[enum_value.name] = enum_value.number
75
+
76
+ # top level extensions
77
+ for (name, extension_des) in file_des.extensions_by_name.items():
78
+ module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
79
+ module[name] = extension_des
80
+
81
+ # services
82
+ for (name, service) in file_des.services_by_name.items():
83
+ module['_' + name.upper()] = service
84
+
85
+ # Build messages.
86
+ for (name, msg_des) in file_des.message_types_by_name.items():
87
+ module[name] = BuildMessage(msg_des, '')
88
+
89
+
90
+ def AddHelpersToExtensions(file_des):
91
+ """no-op to keep old generated code work with new runtime.
92
+
93
+ Args:
94
+ file_des: FileDescriptor of the .proto file
95
+ """
96
+ # TODO: Remove this on-op
97
+ return
98
+
99
+
100
+ def BuildServices(file_des, module_name, module):
101
+ """Builds services classes and services stub class.
102
+
103
+ Args:
104
+ file_des: FileDescriptor of the .proto file
105
+ module_name: str, the name of generated _pb2 module
106
+ module: Generated _pb2 module
107
+ """
108
+ # pylint: disable=g-import-not-at-top
109
+ from google.protobuf import service_reflection
110
+ # pylint: enable=g-import-not-at-top
111
+ for (name, service) in file_des.services_by_name.items():
112
+ module[name] = service_reflection.GeneratedServiceType(
113
+ name, (),
114
+ dict(DESCRIPTOR=service, __module__=module_name))
115
+ stub_name = name + '_Stub'
116
+ module[stub_name] = service_reflection.GeneratedServiceStubType(
117
+ stub_name, (module[name],),
118
+ dict(DESCRIPTOR=service, __module__=module_name))
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/containers.py ADDED
@@ -0,0 +1,690 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains container classes to represent different protocol buffer types.
9
+
10
+ This file defines container classes which represent categories of protocol
11
+ buffer field types which need extra maintenance. Currently these categories
12
+ are:
13
+
14
+ - Repeated scalar fields - These are all repeated fields which aren't
15
+ composite (e.g. they are of simple types like int32, string, etc).
16
+ - Repeated composite fields - Repeated fields which are composite. This
17
+ includes groups and nested messages.
18
+ """
19
+
20
+ import collections.abc
21
+ import copy
22
+ import pickle
23
+ from typing import (
24
+ Any,
25
+ Iterable,
26
+ Iterator,
27
+ List,
28
+ MutableMapping,
29
+ MutableSequence,
30
+ NoReturn,
31
+ Optional,
32
+ Sequence,
33
+ TypeVar,
34
+ Union,
35
+ overload,
36
+ )
37
+
38
+
39
+ _T = TypeVar('_T')
40
+ _K = TypeVar('_K')
41
+ _V = TypeVar('_V')
42
+
43
+
44
class BaseContainer(Sequence[_T]):
  """Common base class for the repeated-field containers."""

  # Keep per-instance memory small and forbid stray attribute assignment.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener: Any) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  @overload
  def __getitem__(self, key: int) -> _T:
    ...

  @overload
  def __getitem__(self, key: slice) -> List[_T]:
    ...

  def __getitem__(self, key):
    """Returns the element (or slice copy) stored at *key*."""
    return self._values[key]

  def __len__(self) -> int:
    """Returns how many elements are currently stored."""
    return len(self._values)

  def __ne__(self, other: Any) -> bool:
    """Inverse of __eq__, which the concrete subclasses must define."""
    return not self == other

  # Containers are mutable, so they must not be hashable.
  __hash__ = None

  def __repr__(self) -> str:
    return repr(self._values)

  def sort(self, *args, **kwargs) -> None:
    """Sorts the underlying list in place; mirrors list.sort()."""
    # Legacy callers may still pass sort_function; translate it to the
    # old cmp keyword before delegating to list.sort(). LBYL is used here
    # deliberately to avoid the cost of catching KeyError in the common case.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)

  def reverse(self) -> None:
    """Reverses the underlying list in place."""
    self._values.reverse()
96
+
97
+
98
# TODO: Drop this registration — only BaseContainer's subclasses
# actually satisfy the MutableSequence interface, not BaseContainer itself.
collections.abc.MutableSequence.register(BaseContainer)
101
+
102
+
103
class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Type-checked, list-like container for repeated scalar fields."""

  # Forbid assignment to attributes outside the declared slot.
  __slots__ = ['_type_checker']

  def __init__(
      self,
      message_listener: Any,
      type_checker: Any,
  ) -> None:
    """Args:

      message_listener: A MessageListener implementation. The
        RepeatedScalarFieldContainer will call this object's Modified() method
        when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super().__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value: _T) -> None:
    """Appends one type-checked item, like list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    # Only fire the first notification; once dirty, the listener already knows.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts a type-checked item at position *key*, like list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Appends every element of *elem_seq*, like list.extend()."""
    checked = [self._type_checker.CheckValue(item) for item in iter(elem_seq)]
    # An empty extension is a no-op and must not mark the message modified.
    if checked:
      self._values.extend(checked)
      self._message_listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other)
    self._message_listener.Modified()

  def remove(self, elem: _T):
    """Removes the first occurrence of *elem*, like list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns the item at *key*, like list.pop()."""
    popped = self._values[key]
    del self[key]
    return popped

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value) -> None:
    """Replaces the item (or contiguous slice) at *key*."""
    if not isinstance(key, slice):
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()
      return
    if key.step is not None:
      raise ValueError('Extended slices not supported')
    self._values[key] = [self._type_checker.CheckValue(v) for v in value]
    self._message_listener.Modified()

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item(s) at *key*."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares this container with another container or plain sequence."""
    if self is other:
      return True
    if isinstance(other, self.__class__):
      # Fast path: identical container types compare backing lists directly.
      return other._values == self._values
    # Otherwise defer to comparison against a generic sequence.
    return other == self._values

  def __deepcopy__(
      self,
      unused_memo: Any = None,
  ) -> 'RepeatedScalarFieldContainer[_T]':
    duplicate = RepeatedScalarFieldContainer(
        copy.deepcopy(self._message_listener), self._type_checker)
    duplicate.MergeFrom(self)
    return duplicate

  def __reduce__(self, **kwargs) -> NoReturn:
    raise pickle.PickleError(
        "Can't pickle repeated scalar fields, convert to list first")
212
+
213
+
214
# TODO: Constrain T to be a subtype of Message.
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """List-like container for repeated composite (message) fields."""

  # Forbid assignment to attributes outside the declared slot.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
    """
    We store a descriptor rather than the generated class itself because the
    contained message type may not yet be initialized at the time this
    container is constructed.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. Its _concrete_class field
        is used to instantiate elements when the client calls add().
    """
    super().__init__(message_listener)
    self._message_descriptor = message_descriptor

  def _new_message(self, **kwargs: Any) -> _T:
    """Instantiates the element type and attaches our listener to it."""
    msg = self._message_descriptor._concrete_class(**kwargs)
    msg._SetListener(self._message_listener)
    return msg

  def add(self, **kwargs: Any) -> _T:
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    element = self._new_message(**kwargs)
    self._values.append(element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return element

  def append(self, value: _T) -> None:
    """Appends a copy of *value* (the message itself is never shared)."""
    element = self._new_message()
    element.CopyFrom(value)
    self._values.append(element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts a copy of *value* at position *key*."""
    element = self._new_message()
    element.CopyFrom(value)
    self._values.insert(key, element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Appends a merged copy of every message in *elem_seq*."""
    # Hoist lookups out of the loop; this runs once per source message.
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for source in elem_seq:
      fresh = message_class()
      fresh._SetListener(listener)
      fresh.MergeFrom(source)
      values.append(fresh)
    listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends copies of the contents of another repeated field of the same
    type to this one.
    """
    self.extend(other)

  def remove(self, elem: _T) -> None:
    """Removes the first occurrence of *elem*, like list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns the item at *key*, like list.pop()."""
    popped = self._values[key]
    del self[key]
    return popped

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value):
    # Present only so the class is structurally a typing.MutableSequence;
    # item assignment is deliberately unsupported and always raises.
    raise TypeError(
        f'{self.__class__.__name__} object does not support item assignment')

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item(s) at *key*."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares this container with another of the exact same type."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
331
+
332
+
333
class ScalarMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container for holding repeated scalars.

  Lookups of missing keys insert the value type's default, mirroring the
  C++ map API (defaultdict-like behavior).
  """

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
               '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      key_checker: Any,
      value_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    try:
      return self._values[key]
    except KeyError:
      # Missing keys are materialized with the value type's default.
      key = self._key_checker.CheckValue(key)
      val = self._value_checker.DefaultValue()
      self._values[key] = val
      return val

  def __contains__(self, item: _K) -> bool:
    # We check the key's type to match the strong-typing flavor of the API.
    # Also this makes it easier to match the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  # Fixed: this mutator returns nothing; the annotation previously said _T.
  def __setitem__(self, key: _K, value: _V) -> None:
    checked_key = self._key_checker.CheckValue(key)
    checked_value = self._value_checker.CheckValue(value)
    self._values[checked_key] = checked_value
    self._message_listener.Modified()

  def __delitem__(self, key: _K) -> None:
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def setdefault(self, key: _K, value: Optional[_V] = None) -> _V:
    # Fixed: identity comparison against None (was '== None', which invokes
    # __eq__ on the value and violates PEP 8).
    if value is None:
      raise ValueError('The value for scalar map setdefault must be set.')
    if key not in self._values:
      self.__setitem__(key, value)
    return self[key]

  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
    """Copies all entries from *other*, overwriting duplicate keys."""
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    """Returns the generated class for a key/value map-entry message."""
    return self._entry_descriptor._concrete_class
441
+
442
class MessageMap(MutableMapping[_K, _V]):
  """Type-checked, dict-like container whose values are sub-messages."""

  # Keep per-instance memory small and forbid stray attribute assignment.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor', '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      message_descriptor: Any,
      key_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation; its Modified()
        method is invoked whenever this map changes.
      message_descriptor: Descriptor of the value message type; its
        _concrete_class is instantiated for missing keys.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    key = self._key_checker.CheckValue(key)
    try:
      return self._values[key]
    except KeyError:
      # Missing keys are materialized with a default-constructed message,
      # mirroring the C++ map API.
      entry = self._message_descriptor._concrete_class()
      entry._SetListener(self._message_listener)
      self._values[key] = entry
      self._message_listener.Modified()
      return entry

  def get_or_create(self, key: _K) -> _V:
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map. This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # Overridden because __getitem__ auto-inserts missing keys; the inherited
  # implementation would therefore mutate the map on every lookup.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    return default

  def __contains__(self, item: _K) -> bool:
    item = self._key_checker.CheckValue(item)
    return item in self._values

  def __setitem__(self, key: _K, value: _V) -> NoReturn:
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key: _K) -> None:
    key = self._key_checker.CheckValue(key)
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def setdefault(self, key: _K, value: Optional[_V] = None) -> _V:
    # Message values cannot be assigned wholesale, so setdefault is
    # unsupported by design.
    raise NotImplementedError(
        'Set message map value directly is not supported, call'
        ' my_map[key].foo = 5'
    )

  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
    """Replaces the entry for every key present in *other* with a copy."""
    # pylint: disable=protected-access
    for key in other._values:
      # According to documentation: "When parsing from the wire or when merging,
      # if there are duplicate map keys the last key seen is used".
      if key in self:
        del self[key]
      self[key].CopyFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self) -> None:
    # The only reliable way to invalidate outstanding iterators over
    # self._values is to force a size change on the old dict.
    stale = self._values
    self._values = stale.copy()
    stale[None] = None

  # Cheaper than the MutableMapping default, which deletes key by key.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    return self._entry_descriptor._concrete_class
568
+
569
+ class _UnknownField:
570
+ """A parsed unknown field."""
571
+
572
+ # Disallows assignment to other attributes.
573
+ __slots__ = ['_field_number', '_wire_type', '_data']
574
+
575
+ def __init__(self, field_number, wire_type, data):
576
+ self._field_number = field_number
577
+ self._wire_type = wire_type
578
+ self._data = data
579
+ return
580
+
581
+ def __lt__(self, other):
582
+ # pylint: disable=protected-access
583
+ return self._field_number < other._field_number
584
+
585
+ def __eq__(self, other):
586
+ if self is other:
587
+ return True
588
+ # pylint: disable=protected-access
589
+ return (self._field_number == other._field_number and
590
+ self._wire_type == other._wire_type and
591
+ self._data == other._data)
592
+
593
+
594
class UnknownFieldRef:  # pylint: disable=missing-class-docstring

  def __init__(self, parent, index):
    # Lazy view into the parent's unknown-field list; every property access
    # re-resolves through the parent so stale refs are detected.
    self._parent = parent
    self._index = index

  def _check_valid(self):
    # The parent drops its storage when the message is cleared, so a falsy
    # parent or an out-of-range index both mean this reference went stale.
    if not self._parent or self._index >= len(self._parent):
      raise ValueError('UnknownField does not exist. '
                       'The parent message might be cleared.')

  @property
  def field_number(self):
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._field_number

  @property
  def wire_type(self):
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._wire_type

  @property
  def data(self):
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)._data
626
+
627
class UnknownFieldSet:
  """Container of the unknown fields parsed into a message.

  After _clear() the backing list is dropped (set to None); all further
  accesses raise ValueError, signalling that the parent message was cleared.
  """

  # Disallows assignment to other attributes.
  __slots__ = ['_values']

  def __init__(self):
    self._values = []

  def __getitem__(self, index):
    """Returns an UnknownFieldRef for the field at *index*.

    Raises:
      ValueError: if this set was cleared along with its parent message.
      IndexError: if *index* is out of range (negative indices are
        normalized Python-style first).
    """
    if self._values is None:
      raise ValueError('UnknownFields does not exist. '
                       'The parent message might be cleared.')
    size = len(self._values)
    if index < 0:
      index += size
    if index < 0 or index >= size:
      # Fixed: the message was previously never formatted — the bound
      # str.index method was passed instead of applying '% index'.
      raise IndexError('index %d out of range' % index)

    return UnknownFieldRef(self, index)

  def _internal_get(self, index):
    # Raw accessor used by UnknownFieldRef; no bounds/validity checks.
    return self._values[index]

  def __len__(self):
    if self._values is None:
      raise ValueError('UnknownFields does not exist. '
                       'The parent message might be cleared.')
    return len(self._values)

  def _add(self, field_number, wire_type, data):
    """Appends a new _UnknownField and returns it (internal use only)."""
    unknown_field = _UnknownField(field_number, wire_type, data)
    self._values.append(unknown_field)
    return unknown_field

  def __iter__(self):
    for i in range(len(self)):
      yield UnknownFieldRef(self, i)

  def _extend(self, other):
    """Appends all fields from *other* (another UnknownFieldSet or None)."""
    if other is None:
      return
    # pylint: disable=protected-access
    self._values.extend(other._values)

  def __eq__(self, other):
    if self is other:
      return True
    # Sort unknown fields because their order shouldn't
    # affect equality test.
    values = list(self._values)
    if other is None:
      return not values
    values.sort()
    # pylint: disable=protected-access
    other_values = sorted(other._values)
    return values == other_values

  def _clear(self):
    """Recursively clears nested groups, then drops the backing storage."""
    for value in self._values:
      # pylint: disable=protected-access
      if isinstance(value._data, UnknownFieldSet):
        value._data._clear()  # pylint: disable=protected-access
    self._values = None
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/decoder.py ADDED
@@ -0,0 +1,1066 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Code for decoding protocol buffer primitives.
9
+
10
+ This code is very similar to encoder.py -- read the docs for that module first.
11
+
12
+ A "decoder" is a function with the signature:
13
+ Decode(buffer, pos, end, message, field_dict)
14
+ The arguments are:
15
+ buffer: The string containing the encoded message.
16
+ pos: The current position in the string.
17
+ end: The position in the string where the current message ends. May be
18
+ less than len(buffer) if we're reading a sub-message.
19
+ message: The message object into which we're parsing.
20
+ field_dict: message._fields (avoids a hashtable lookup).
21
+ The decoder reads the field and stores it into field_dict, returning the new
22
+ buffer position. A decoder for a repeated field may proactively decode all of
23
+ the elements of that field, if they appear consecutively.
24
+
25
+ Note that decoders may throw any of the following:
26
+ IndexError: Indicates a truncated message.
27
+ struct.error: Unpacking of a fixed-width field failed.
28
+ message.DecodeError: Other errors.
29
+
30
+ Decoders are expected to raise an exception if they are called with pos > end.
31
+ This allows callers to be lax about bounds checking: it's fine to read past
32
+ "end" as long as you are sure that someone else will notice and throw an
33
+ exception later on.
34
+
35
+ Something up the call stack is expected to catch IndexError and struct.error
36
+ and convert them to message.DecodeError.
37
+
38
+ Decoders are constructed using decoder constructors with the signature:
39
+ MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
40
+ The arguments are:
41
+ field_number: The field number of the field we want to decode.
42
+ is_repeated: Is the field a repeated field? (bool)
43
+ is_packed: Is the field a packed field? (bool)
44
+ key: The key to use when looking up the field within field_dict.
45
+ (This is actually the FieldDescriptor but nothing in this
46
+ file should depend on that.)
47
+ new_default: A function which takes a message object as a parameter and
48
+ returns a new instance of the default value for this field.
49
+ (This is called for repeated fields and sub-messages, when an
50
+ instance does not already exist.)
51
+
52
+ As with encoders, we define a decoder constructor for every type of field.
53
+ Then, for every field of every message class we construct an actual decoder.
54
+ That decoder goes into a dict indexed by tag, so when we decode a message
55
+ we repeatedly read a tag, look up the corresponding decoder, and invoke it.
56
+ """
57
+
58
+ __author__ = 'kenton@google.com (Kenton Varda)'
59
+
60
+ import math
61
+ import numbers
62
+ import struct
63
+
64
+ from google.protobuf import message
65
+ from google.protobuf.internal import containers
66
+ from google.protobuf.internal import encoder
67
+ from google.protobuf.internal import wire_format
68
+
69
+
70
+ # This is not for optimization, but rather to avoid conflicts with local
71
+ # variables named "message".
72
+ _DecodeError = message.DecodeError
73
+
74
+
75
def IsDefaultScalarValue(value):
  """Returns whether or not a scalar value is the default value of its type.

  Specifically, this should be used to determine presence of implicit-presence
  fields, where we disallow custom defaults.

  Args:
    value: A scalar value to check.

  Returns:
    True if the value is equivalent to a default value, False otherwise.
  """
  # Negative zero is falsy in Python yet distinct on the wire, so plain
  # truthiness would give the wrong answer for it; copysign() tells the
  # two zeros apart.
  is_negative_number = (
      isinstance(value, numbers.Number) and math.copysign(1.0, value) < 0)
  return not value and not is_negative_number
94
+
95
+
96
def _VarintDecoder(mask, result_type):
  """Return a decoder for a basic varint value (does not include tag).

  Decoded values will be bitwise-anded with the given mask before being
  returned, e.g. to limit them to 32 bits.  The returned decoder does not
  take the usual "end" parameter -- the caller is expected to do bounds checking
  after the fact (often the caller can defer such checking until later).  The
  decoder returns a (value, new_pos) pair.

  Args:
    mask: Bitmask applied to the decoded value, e.g. (1 << 32) - 1.
    result_type: Callable applied to the masked value before returning.
  """

  def DecodeVarint(buffer, pos: 'int | None' = None):
    # When pos is None, `buffer` is treated as a readable stream (e.g.
    # BytesIO) and only the value is returned (None at end of stream);
    # otherwise `buffer` is indexable and a (value, new_pos) pair is returned.
    result = 0
    shift = 0
    while 1:
      if pos is None:
        # Read from BytesIO
        try:
          b = buffer.read(1)[0]
        except IndexError as e:
          # read(1) returned b'' -- indexing it raises IndexError.
          if shift == 0:
            # End of BytesIO.
            return None
          else:
            raise ValueError('Fail to read varint %s' % str(e))
      else:
        b = buffer[pos]
        pos += 1
      result |= ((b & 0x7f) << shift)
      if not (b & 0x80):
        # High (continuation) bit clear: this was the final byte.
        result &= mask
        result = result_type(result)
        return result if pos is None else (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')

  return DecodeVarint
133
+
134
+
135
+ def _SignedVarintDecoder(bits, result_type):
136
+ """Like _VarintDecoder() but decodes signed values."""
137
+
138
+ signbit = 1 << (bits - 1)
139
+ mask = (1 << bits) - 1
140
+
141
+ def DecodeVarint(buffer, pos):
142
+ result = 0
143
+ shift = 0
144
+ while 1:
145
+ b = buffer[pos]
146
+ result |= ((b & 0x7f) << shift)
147
+ pos += 1
148
+ if not (b & 0x80):
149
+ result &= mask
150
+ result = (result ^ signbit) - signbit
151
+ result = result_type(result)
152
+ return (result, pos)
153
+ shift += 7
154
+ if shift >= 64:
155
+ raise _DecodeError('Too many bytes when decoding varint.')
156
+ return DecodeVarint
157
+
158
# Shared varint decoder instances used throughout this module.
# All 32-bit and 64-bit values are represented as int.
_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
_DecodeSignedVarint = _SignedVarintDecoder(64, int)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
165
+
166
+
167
def ReadTag(buffer, pos):
  """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.

  The raw encoded bytes of the tag are returned rather than the decoded
  value, so callers can use them directly as a lookup key for the proper
  decoder.  That trades pure-Python work (decoding a varint) for C-level
  work (hashing a byte string), which is a net win in Python.

  Args:
    buffer: memoryview object of the encoded bytes
    pos: int of the current position to start from

  Returns:
    Tuple[bytes, int] of the tag data and new position.
  """
  tag_end = pos
  # A varint continues while the high bit is set; the last byte has it clear.
  while buffer[tag_end] & 0x80:
    tag_end += 1
  tag_end += 1
  return buffer[pos:tag_end].tobytes(), tag_end
191
+
192
+
193
def DecodeTag(tag_bytes):
  """Decode a serialized tag.

  Args:
    tag_bytes: the raw encoded bytes of the tag

  Returns:
    Tuple[int, int] of the tag field number and wire type.
  """
  tag = _DecodeVarint(tag_bytes, 0)[0]
  return wire_format.UnpackTag(tag)
204
+
205
+
206
+ # --------------------------------------------------------------------
207
+
208
+
209
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type: The field's wire type.
      decode_value: A function which decodes an individual value, e.g.
        _DecodeVarint()

  Returns:
    A constructor taking (field_number, is_repeated, is_packed, key,
    new_default, clear_if_default) and returning a field decoder with the
    signature (buffer, pos, end, message, field_dict, current_depth) -> int.
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
                      clear_if_default=False):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(
          buffer, pos, end, message, field_dict, current_depth=0
      ):
        del current_depth  # unused
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # A packed field is a length-delimited run of values: read the byte
        # length first, then decode values until it is consumed.
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos

      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(
          buffer, pos, end, message, field_dict, current_depth=0
      ):
        del current_depth  # unused
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos

      return DecodeRepeatedField
    else:

      def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
        del current_depth  # unused
        (new_value, pos) = decode_value(buffer, pos)
        if pos > end:
          raise _DecodeError('Truncated message.')
        if clear_if_default and IsDefaultScalarValue(new_value):
          # Implicit-presence field serialized with its default: treat as
          # absent rather than present-with-default.
          field_dict.pop(key, None)
        else:
          field_dict[key] = new_value
        return pos

      return DecodeField

  return SpecificDecoder
281
+
282
+
283
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like _SimpleDecoder() but post-processes every decoded value.

  modify_value is applied to each raw decoded value before it is stored;
  usually modify_value is ZigZagDecode.
  """

  # Delegating to _SimpleDecoder is marginally slower than duplicating its
  # body here, but not enough to matter.
  def InnerDecode(buffer, pos):
    raw_value, next_pos = decode_value(buffer, pos)
    return (modify_value(raw_value), next_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
295
+
296
+
297
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type: The field's wire type.
      format: The format string to pass to struct.unpack().
  """

  # The width in bytes is fixed by the format, so it is computed once here.
  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    new_pos = pos + value_size
    result = local_unpack(format, buffer[pos:new_pos])[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_type, InnerDecode)
320
+
321
+
322
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized float to a float and new position.

    Args:
      buffer: memoryview of the serialized bytes
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the deserialized float value and new position
      in the serialized data.
    """
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (A one-byte slice `in b'\x7F\xFF'` tests whether the high byte is
    # 0x7F or 0xFF, i.e. exponent all-ones; slicing keeps it a bytes object
    # so the comparisons below stay byte-wise.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (math.nan, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (-math.inf, new_pos)
      return (math.inf, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
365
+
366
+
367
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized double to a double and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the decoded double value and new position
      in the serialized data.
    """
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    # (A one-byte slice `in b'\x7F\xFF'` tests whether the high byte is
    # 0x7F or 0xFF, i.e. sign bit either way with exponent high bits set.)
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (math.nan, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
405
+
406
+
407
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
                clear_if_default=False):
  """Returns a decoder for enum field.

  Unrecognized enum values are preserved in the message's unknown fields
  rather than stored in the field, so they round-trip on reserialization.
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      """Decode serialized packed enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      del current_depth  # unused
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: stash the raw varint as an unknown field.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)

          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        # pylint: enable=protected-access
      if pos > endpoint:
        # Roll back whichever list the last (truncated) element went into.
        if element in enum_type.values_by_number:
          del value[-1]   # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
          # pylint: enable=protected-access
        raise _DecodeError('Packed element was truncated.')
      return pos

    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      del current_depth  # unused
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos].tobytes()))
        # pylint: enable=protected-access
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos

    return DecodeRepeatedField
  else:

    def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
      """Decode serialized singular enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      del current_depth  # unused
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if clear_if_default and IsDefaultScalarValue(enum_value):
        # Implicit-presence field serialized with its default: clear it.
        field_dict.pop(key, None)
        return pos
      # pylint: disable=protected-access
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        # pylint: enable=protected-access
      return pos

    return DecodeField
541
+
542
+
543
# --------------------------------------------------------------------
# Concrete decoder constructors for each scalar field type.


Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 use zigzag encoding, undone by ZigZagDecode after decoding.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

# Bools are varints on the wire; `bool` coerces the decoded int.
BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
573
+
574
+
575
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
                  clear_if_default=False):
  """Returns a decoder for a string field.

  Args:
    field_number: The field number of the field.
    is_repeated: Whether the field is repeated.
    is_packed: Must be False; strings cannot be packed.
    key: The FieldDescriptor used as the key in the message's field dict.
    new_default: Factory producing a default value for the field.
    clear_if_default: If True, an explicitly-serialized empty string clears
      the field instead of setting it (implicit presence).
  """

  local_DecodeVarint = _DecodeVarint

  def _ConvertToUnicode(memview):
    """Convert byte to unicode, annotating decode errors with the field."""
    byte_str = memview.tobytes()
    try:
      value = str(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

    return value

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      del current_depth  # unused
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos

    return DecodeRepeatedField
  else:

    def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
      del current_depth  # unused
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and IsDefaultScalarValue(size):
        # Zero-length string on an implicit-presence field: treat as absent.
        field_dict.pop(key, None)
      else:
        field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos

    return DecodeField
633
+
634
+
635
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
                 clear_if_default=False):
  """Returns a decoder for a bytes field.

  Args:
    field_number: The field number of the field.
    is_repeated: Whether the field is repeated.
    is_packed: Must be False; bytes fields cannot be packed.
    key: The FieldDescriptor used as the key in the message's field dict.
    new_default: Factory producing a default value for the field.
    clear_if_default: If True, an explicitly-serialized empty value clears
      the field instead of setting it (implicit presence).
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      del current_depth  # unused
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos].tobytes())
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos

    return DecodeRepeatedField
  else:

    def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
      del current_depth  # unused
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      if clear_if_default and IsDefaultScalarValue(size):
        # Zero-length value on an implicit-presence field: treat as absent.
        field_dict.pop(key, None)
      else:
        field_dict[key] = buffer[pos:new_pos].tobytes()
      return new_pos

    return DecodeField
681
+
682
+
683
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  Args:
    field_number: The field number of the group.
    is_repeated: Whether the field is repeated.
    is_packed: Must be False; groups cannot be packed.
    key: The FieldDescriptor used as the key in the message's field dict.
    new_default: Factory producing a default value for the field.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      # Fetch (or create) the repeated container once.  Parsing sub-messages
      # below never removes it from field_dict, so the per-iteration
      # re-fetch the previous version performed was redundant work.
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read sub-message.
        current_depth += 1
        if current_depth > _recursion_limit:
          raise _DecodeError(
              'Error parsing message: too many levels of nesting.'
          )
        pos = value.add()._InternalParse(buffer, pos, end, current_depth)
        current_depth -= 1
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos

    return DecodeRepeatedField
  else:

    def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      current_depth += 1
      if current_depth > _recursion_limit:
        raise _DecodeError('Error parsing message: too many levels of nesting.')
      pos = value._InternalParse(buffer, pos, end, current_depth)
      current_depth -= 1
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos

    return DecodeField
743
+
744
+
745
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field.

  Args:
    field_number: The field number of the field.
    is_repeated: Whether the field is repeated.
    is_packed: Must be False; message fields cannot be packed.
    key: The FieldDescriptor used as the key in the message's field dict.
    new_default: Factory producing a default value for the field.
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(
        buffer, pos, end, message, field_dict, current_depth=0
    ):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        current_depth += 1
        if current_depth > _recursion_limit:
          raise _DecodeError(
              'Error parsing message: too many levels of nesting.'
          )
        if (
            value.add()._InternalParse(buffer, pos, new_pos, current_depth)
            != new_pos
        ):
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        current_depth -= 1
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos

    return DecodeRepeatedField
  else:

    def DecodeField(buffer, pos, end, message, field_dict, current_depth=0):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      current_depth += 1
      if current_depth > _recursion_limit:
        raise _DecodeError('Error parsing message: too many levels of nesting.')
      if value._InternalParse(buffer, pos, new_pos, current_depth) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      current_depth -= 1
      return new_pos

    return DecodeField
811
+
812
+
813
# --------------------------------------------------------------------

# Tag that opens one `Item` group within a serialized MessageSet.
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
816
+
817
def MessageSetItemDecoder(descriptor):
  """Returns a decoder for a MessageSet item.

  The parameter is the message Descriptor.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decode serialized message set to its value and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.
      end: int, end position of serialized data
      message: Message object to store unknown fields in
      field_dict: Map[Descriptor, Any] to store decoded values in.

    Returns:
      int, new position in serialized data.
    """
    message_set_item_start = pos
    # -1 sentinels mean "this sub-field has not been seen yet".
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Skip any other field inside the item, keeping nothing.
        field_number, wire_type = DecodeTag(tag_bytes)
        _, pos = _DecodeUnknownField(buffer, pos, end, field_number, wire_type)
        if pos == -1:
          raise _DecodeError('Unexpected end-group tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = message.Extensions._FindExtensionByNumber(type_id)
    # pylint: disable=protected-access
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        message_type = extension.message_type
        if not hasattr(message_type, '_concrete_class'):
          # NOTE(review): message_factory is not among this module's visible
          # imports -- confirm it is brought into scope before this runs.
          message_factory.GetMessageClass(message_type)
        value = field_dict.setdefault(
            extension, message_type._concrete_class())
      if value._InternalParse(buffer, message_start,message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown extension: keep the whole raw item so it round-trips.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append(
          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
    # pylint: enable=protected-access

    return pos

  return DecodeItem
905
+
906
+
907
def UnknownMessageSetItemDecoder():
  """Returns a decoder for an unknown MessageSet item."""

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  def DecodeUnknownItem(buffer):
    """Decode one MessageSet item into (type_id, message_bytes).

    Args:
      buffer: memoryview of the serialized item contents.

    Returns:
      Tuple[int, bytes] of the extension type_id and its raw payload.

    Raises:
      _DecodeError: If the item is truncated or missing type_id/message.
    """
    pos = 0
    end = len(buffer)
    # Initialize all three to -1 sentinels.  type_id was previously left
    # uninitialized, so an item whose end tag arrived before a type_id field
    # raised UnboundLocalError instead of the intended _DecodeError below.
    type_id = -1
    message_start = -1
    message_end = -1
    while 1:
      (tag_bytes, pos) = ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = _DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = _DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Skip any other field inside the item.
        field_number, wire_type = DecodeTag(tag_bytes)
        _, pos = _DecodeUnknownField(buffer, pos, end, field_number, wire_type)
        if pos == -1:
          raise _DecodeError('Unexpected end-group tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    return (type_id, buffer[message_start:message_end].tobytes())

  return DecodeUnknownItem
945
+
946
+ # --------------------------------------------------------------------
947
+
948
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field.

  Args:
    field_descriptor: FieldDescriptor of the map field.
    new_default: Factory producing the map container for a message.
    is_message_map: True if map values are messages (merged via CopyFrom)
      rather than scalars (assigned directly).
  """

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict, current_depth=0):
    del current_depth  # Unused.
    # Each map entry is a length-delimited sub-message with `key` and `value`
    # fields; one scratch submessage is reused across entries.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        value[submsg.key].CopyFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed.  Return.
        return new_pos

  return DecodeMap
990
+
991
+
992
+ def _DecodeFixed64(buffer, pos):
993
+ """Decode a fixed64."""
994
+ new_pos = pos + 8
995
+ return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
996
+
997
+
998
+ def _DecodeFixed32(buffer, pos):
999
+ """Decode a fixed32."""
1000
+
1001
+ new_pos = pos + 4
1002
+ return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
1003
# Default maximum nesting depth permitted while parsing.
DEFAULT_RECURSION_LIMIT = 100
# Current module-wide limit; mutated only via SetRecursionLimit().
_recursion_limit = DEFAULT_RECURSION_LIMIT


def SetRecursionLimit(new_limit):
  """Sets the maximum nesting depth allowed when parsing messages."""
  global _recursion_limit
  _recursion_limit = new_limit
1010
+
1011
+
1012
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None, current_depth=0):
  """Decode UnknownFieldSet.  Returns the UnknownFieldSet and new position."""

  unknown_field_set = containers.UnknownFieldSet()
  # With end_pos=None, parsing continues until an end-group tag is hit.
  while end_pos is None or pos < end_pos:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    (tag, _) = _DecodeVarint(tag_bytes, 0)
    field_number, wire_type = wire_format.UnpackTag(tag)
    if wire_type == wire_format.WIRETYPE_END_GROUP:
      # End of the enclosing group: stop without consuming more fields.
      break
    (data, pos) = _DecodeUnknownField(
        buffer, pos, end_pos, field_number, wire_type, current_depth
    )
    # pylint: disable=protected-access
    unknown_field_set._add(field_number, wire_type, data)

  return (unknown_field_set, pos)
1029
+
1030
+
1031
def _DecodeUnknownField(
    buffer, pos, end_pos, field_number, wire_type, current_depth=0
):
  """Decode an unknown field.  Returns the UnknownField and new position.

  Args:
    buffer: memoryview of the serialized bytes.
    pos: int, position after the field's tag.
    end_pos: int or None; when None, no truncation check is performed
      (the caller parses until an end-group tag).
    field_number: int field number from the tag.
    wire_type: int wire type from the tag.
    current_depth: int nesting depth, used to enforce _recursion_limit.

  Returns:
    Tuple of (decoded data, new position), or the sentinel (0, -1) when an
    end-group tag is encountered.
  """

  if wire_type == wire_format.WIRETYPE_VARINT:
    (data, pos) = _DecodeVarint(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED64:
    (data, pos) = _DecodeFixed64(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_FIXED32:
    (data, pos) = _DecodeFixed32(buffer, pos)
  elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    (size, pos) = _DecodeVarint(buffer, pos)
    data = buffer[pos:pos+size].tobytes()
    pos += size
  elif wire_type == wire_format.WIRETYPE_START_GROUP:
    end_tag_bytes = encoder.TagBytes(
        field_number, wire_format.WIRETYPE_END_GROUP
    )
    current_depth += 1
    if current_depth >= _recursion_limit:
      raise _DecodeError('Error parsing message: too many levels of nesting.')
    # Recursively consume the nested group as another unknown field set.
    data, pos = _DecodeUnknownFieldSet(buffer, pos, end_pos, current_depth)
    current_depth -= 1
    # Check end tag.
    if buffer[pos - len(end_tag_bytes) : pos] != end_tag_bytes:
      raise _DecodeError('Missing group end tag.')
  elif wire_type == wire_format.WIRETYPE_END_GROUP:
    # Sentinel: tells the caller an end-group tag was hit in its place.
    return (0, -1)
  else:
    raise _DecodeError('Wrong wire type in tag.')

  # end_pos may legitimately be None (see _DecodeUnknownFieldSet's default);
  # comparing `pos > None` previously raised TypeError instead of parsing.
  if end_pos is not None and pos > end_pos:
    raise _DecodeError('Truncated message.')

  return (data, pos)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/encoder.py ADDED
@@ -0,0 +1,806 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Code for encoding protocol message primitives.
9
+
10
+ Contains the logic for encoding every logical protocol field type
11
+ into one of the 5 physical wire types.
12
+
13
+ This code is designed to push the Python interpreter's performance to the
14
+ limits.
15
+
16
+ The basic idea is that at startup time, for every field (i.e. every
17
+ FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
18
+ sizer takes a value of this field's type and computes its byte size. The
19
+ encoder takes a writer function and a value. It encodes the value into byte
20
+ strings and invokes the writer function to write those strings. Typically the
21
+ writer function is the write() method of a BytesIO.
22
+
23
+ We try to do as much work as possible when constructing the writer and the
24
+ sizer rather than when calling them. In particular:
25
+ * We copy any needed global functions to local variables, so that we do not need
26
+ to do costly global table lookups at runtime.
27
+ * Similarly, we try to do any attribute lookups at startup time if possible.
28
+ * Every field's tag is encoded to bytes at startup, since it can't change at
29
+ runtime.
30
+ * Whatever component of the field size we can compute at startup, we do.
31
+ * We *avoid* sharing code if doing so would make the code slower and not sharing
32
+ does not burden us too much. For example, encoders for repeated fields do
33
+ not just call the encoders for singular fields in a loop because this would
34
+ add an extra function call overhead for every loop iteration; instead, we
35
+ manually inline the single-value encoder into the loop.
36
+ * If a Python function lacks a return statement, Python actually generates
37
+ instructions to pop the result of the last statement off the stack, push
38
+ None onto the stack, and then return that. If we really don't care what
39
+ value is returned, then we can save two instructions by returning the
40
+ result of the last statement. It looks funny but it helps.
41
+ * We assume that type and bounds checking has happened at a higher level.
42
+ """
43
+
44
+ __author__ = 'kenton@google.com (Kenton Varda)'
45
+
46
+ import struct
47
+
48
+ from google.protobuf.internal import wire_format
49
+
50
+
51
+ # This will overflow and thus become IEEE-754 "infinity". We would use
52
+ # "float('inf')" but it doesn't work on Windows pre-Python-2.6.
53
+ _POS_INF = 1e10000
54
+ _NEG_INF = -_POS_INF
55
+
56
+
57
+ def _VarintSize(value):
58
+ """Compute the size of a varint value."""
59
+ if value <= 0x7f: return 1
60
+ if value <= 0x3fff: return 2
61
+ if value <= 0x1fffff: return 3
62
+ if value <= 0xfffffff: return 4
63
+ if value <= 0x7ffffffff: return 5
64
+ if value <= 0x3ffffffffff: return 6
65
+ if value <= 0x1ffffffffffff: return 7
66
+ if value <= 0xffffffffffffff: return 8
67
+ if value <= 0x7fffffffffffffff: return 9
68
+ return 10
69
+
70
+
71
+ def _SignedVarintSize(value):
72
+ """Compute the size of a signed varint value."""
73
+ if value < 0: return 10
74
+ if value <= 0x7f: return 1
75
+ if value <= 0x3fff: return 2
76
+ if value <= 0x1fffff: return 3
77
+ if value <= 0xfffffff: return 4
78
+ if value <= 0x7ffffffff: return 5
79
+ if value <= 0x3ffffffffff: return 6
80
+ if value <= 0x1ffffffffffff: return 7
81
+ if value <= 0xffffffffffffff: return 8
82
+ if value <= 0x7fffffffffffffff: return 9
83
+ return 10
84
+
85
+
86
+ def _TagSize(field_number):
87
+ """Returns the number of bytes required to serialize a tag with this field
88
+ number."""
89
+ # Just pass in type 0, since the type won't affect the tag+type size.
90
+ return _VarintSize(wire_format.PackTag(field_number, 0))
91
+
92
+
93
+ # --------------------------------------------------------------------
94
+ # In this section we define some generic sizers. Each of these functions
95
+ # takes parameters specific to a particular field type, e.g. int32 or fixed64.
96
+ # It returns another function which in turn takes parameters specific to a
97
+ # particular field, e.g. the field number and whether it is repeated or packed.
98
+ # Look at the next section to see how these are used.
99
+
100
+
101
+ def _SimpleSizer(compute_value_size):
102
+ """A sizer which uses the function compute_value_size to compute the size of
103
+ each value. Typically compute_value_size is _VarintSize."""
104
+
105
+ def SpecificSizer(field_number, is_repeated, is_packed):
106
+ tag_size = _TagSize(field_number)
107
+ if is_packed:
108
+ local_VarintSize = _VarintSize
109
+ def PackedFieldSize(value):
110
+ result = 0
111
+ for element in value:
112
+ result += compute_value_size(element)
113
+ return result + local_VarintSize(result) + tag_size
114
+ return PackedFieldSize
115
+ elif is_repeated:
116
+ def RepeatedFieldSize(value):
117
+ result = tag_size * len(value)
118
+ for element in value:
119
+ result += compute_value_size(element)
120
+ return result
121
+ return RepeatedFieldSize
122
+ else:
123
+ def FieldSize(value):
124
+ return tag_size + compute_value_size(value)
125
+ return FieldSize
126
+
127
+ return SpecificSizer
128
+
129
+
130
+ def _ModifiedSizer(compute_value_size, modify_value):
131
+ """Like SimpleSizer, but modify_value is invoked on each value before it is
132
+ passed to compute_value_size. modify_value is typically ZigZagEncode."""
133
+
134
+ def SpecificSizer(field_number, is_repeated, is_packed):
135
+ tag_size = _TagSize(field_number)
136
+ if is_packed:
137
+ local_VarintSize = _VarintSize
138
+ def PackedFieldSize(value):
139
+ result = 0
140
+ for element in value:
141
+ result += compute_value_size(modify_value(element))
142
+ return result + local_VarintSize(result) + tag_size
143
+ return PackedFieldSize
144
+ elif is_repeated:
145
+ def RepeatedFieldSize(value):
146
+ result = tag_size * len(value)
147
+ for element in value:
148
+ result += compute_value_size(modify_value(element))
149
+ return result
150
+ return RepeatedFieldSize
151
+ else:
152
+ def FieldSize(value):
153
+ return tag_size + compute_value_size(modify_value(value))
154
+ return FieldSize
155
+
156
+ return SpecificSizer
157
+
158
+
159
+ def _FixedSizer(value_size):
160
+ """Like _SimpleSizer except for a fixed-size field. The input is the size
161
+ of one value."""
162
+
163
+ def SpecificSizer(field_number, is_repeated, is_packed):
164
+ tag_size = _TagSize(field_number)
165
+ if is_packed:
166
+ local_VarintSize = _VarintSize
167
+ def PackedFieldSize(value):
168
+ result = len(value) * value_size
169
+ return result + local_VarintSize(result) + tag_size
170
+ return PackedFieldSize
171
+ elif is_repeated:
172
+ element_size = value_size + tag_size
173
+ def RepeatedFieldSize(value):
174
+ return len(value) * element_size
175
+ return RepeatedFieldSize
176
+ else:
177
+ field_size = value_size + tag_size
178
+ def FieldSize(value):
179
+ return field_size
180
+ return FieldSize
181
+
182
+ return SpecificSizer
183
+
184
+
185
+ # ====================================================================
186
+ # Here we declare a sizer constructor for each field type. Each "sizer
187
+ # constructor" is a function that takes (field_number, is_repeated, is_packed)
188
+ # as parameters and returns a sizer, which in turn takes a field value as
189
+ # a parameter and returns its encoded size.
190
+
191
+
192
+ Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
193
+
194
+ UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
195
+
196
+ SInt32Sizer = SInt64Sizer = _ModifiedSizer(
197
+ _SignedVarintSize, wire_format.ZigZagEncode)
198
+
199
+ Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
200
+ Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
201
+
202
+ BoolSizer = _FixedSizer(1)
203
+
204
+
205
+ def StringSizer(field_number, is_repeated, is_packed):
206
+ """Returns a sizer for a string field."""
207
+
208
+ tag_size = _TagSize(field_number)
209
+ local_VarintSize = _VarintSize
210
+ local_len = len
211
+ assert not is_packed
212
+ if is_repeated:
213
+ def RepeatedFieldSize(value):
214
+ result = tag_size * len(value)
215
+ for element in value:
216
+ l = local_len(element.encode('utf-8'))
217
+ result += local_VarintSize(l) + l
218
+ return result
219
+ return RepeatedFieldSize
220
+ else:
221
+ def FieldSize(value):
222
+ l = local_len(value.encode('utf-8'))
223
+ return tag_size + local_VarintSize(l) + l
224
+ return FieldSize
225
+
226
+
227
+ def BytesSizer(field_number, is_repeated, is_packed):
228
+ """Returns a sizer for a bytes field."""
229
+
230
+ tag_size = _TagSize(field_number)
231
+ local_VarintSize = _VarintSize
232
+ local_len = len
233
+ assert not is_packed
234
+ if is_repeated:
235
+ def RepeatedFieldSize(value):
236
+ result = tag_size * len(value)
237
+ for element in value:
238
+ l = local_len(element)
239
+ result += local_VarintSize(l) + l
240
+ return result
241
+ return RepeatedFieldSize
242
+ else:
243
+ def FieldSize(value):
244
+ l = local_len(value)
245
+ return tag_size + local_VarintSize(l) + l
246
+ return FieldSize
247
+
248
+
249
+ def GroupSizer(field_number, is_repeated, is_packed):
250
+ """Returns a sizer for a group field."""
251
+
252
+ tag_size = _TagSize(field_number) * 2
253
+ assert not is_packed
254
+ if is_repeated:
255
+ def RepeatedFieldSize(value):
256
+ result = tag_size * len(value)
257
+ for element in value:
258
+ result += element.ByteSize()
259
+ return result
260
+ return RepeatedFieldSize
261
+ else:
262
+ def FieldSize(value):
263
+ return tag_size + value.ByteSize()
264
+ return FieldSize
265
+
266
+
267
+ def MessageSizer(field_number, is_repeated, is_packed):
268
+ """Returns a sizer for a message field."""
269
+
270
+ tag_size = _TagSize(field_number)
271
+ local_VarintSize = _VarintSize
272
+ assert not is_packed
273
+ if is_repeated:
274
+ def RepeatedFieldSize(value):
275
+ result = tag_size * len(value)
276
+ for element in value:
277
+ l = element.ByteSize()
278
+ result += local_VarintSize(l) + l
279
+ return result
280
+ return RepeatedFieldSize
281
+ else:
282
+ def FieldSize(value):
283
+ l = value.ByteSize()
284
+ return tag_size + local_VarintSize(l) + l
285
+ return FieldSize
286
+
287
+
288
+ # --------------------------------------------------------------------
289
+ # MessageSet is special: it needs custom logic to compute its size properly.
290
+
291
+
292
+ def MessageSetItemSizer(field_number):
293
+ """Returns a sizer for extensions of MessageSet.
294
+
295
+ The message set message looks like this:
296
+ message MessageSet {
297
+ repeated group Item = 1 {
298
+ required int32 type_id = 2;
299
+ required string message = 3;
300
+ }
301
+ }
302
+ """
303
+ static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
304
+ _TagSize(3))
305
+ local_VarintSize = _VarintSize
306
+
307
+ def FieldSize(value):
308
+ l = value.ByteSize()
309
+ return static_size + local_VarintSize(l) + l
310
+
311
+ return FieldSize
312
+
313
+
314
+ # --------------------------------------------------------------------
315
+ # Map is special: it needs custom logic to compute its size properly.
316
+
317
+
318
+ def MapSizer(field_descriptor, is_message_map):
319
+ """Returns a sizer for a map field."""
320
+
321
+ # Can't look at field_descriptor.message_type._concrete_class because it may
322
+ # not have been initialized yet.
323
+ message_type = field_descriptor.message_type
324
+ message_sizer = MessageSizer(field_descriptor.number, False, False)
325
+
326
+ def FieldSize(map_value):
327
+ total = 0
328
+ for key in map_value:
329
+ value = map_value[key]
330
+ # It's wasteful to create the messages and throw them away one second
331
+ # later since we'll do the same for the actual encode. But there's not an
332
+ # obvious way to avoid this within the current design without tons of code
333
+ # duplication. For message map, value.ByteSize() should be called to
334
+ # update the status.
335
+ entry_msg = message_type._concrete_class(key=key, value=value)
336
+ total += message_sizer(entry_msg)
337
+ if is_message_map:
338
+ value.ByteSize()
339
+ return total
340
+
341
+ return FieldSize
342
+
343
+ # ====================================================================
344
+ # Encoders!
345
+
346
+
347
+ def _VarintEncoder():
348
+ """Return an encoder for a basic varint value (does not include tag)."""
349
+
350
+ local_int2byte = struct.Struct('>B').pack
351
+
352
+ def EncodeVarint(write, value, unused_deterministic=None):
353
+ bits = value & 0x7f
354
+ value >>= 7
355
+ while value:
356
+ write(local_int2byte(0x80|bits))
357
+ bits = value & 0x7f
358
+ value >>= 7
359
+ return write(local_int2byte(bits))
360
+
361
+ return EncodeVarint
362
+
363
+
364
+ def _SignedVarintEncoder():
365
+ """Return an encoder for a basic signed varint value (does not include
366
+ tag)."""
367
+
368
+ local_int2byte = struct.Struct('>B').pack
369
+
370
+ def EncodeSignedVarint(write, value, unused_deterministic=None):
371
+ if value < 0:
372
+ value += (1 << 64)
373
+ bits = value & 0x7f
374
+ value >>= 7
375
+ while value:
376
+ write(local_int2byte(0x80|bits))
377
+ bits = value & 0x7f
378
+ value >>= 7
379
+ return write(local_int2byte(bits))
380
+
381
+ return EncodeSignedVarint
382
+
383
+
384
+ _EncodeVarint = _VarintEncoder()
385
+ _EncodeSignedVarint = _SignedVarintEncoder()
386
+
387
+
388
+ def _VarintBytes(value):
389
+ """Encode the given integer as a varint and return the bytes. This is only
390
+ called at startup time so it doesn't need to be fast."""
391
+
392
+ pieces = []
393
+ _EncodeVarint(pieces.append, value, True)
394
+ return b"".join(pieces)
395
+
396
+
397
+ def TagBytes(field_number, wire_type):
398
+ """Encode the given tag and return the bytes. Only called at startup."""
399
+
400
+ return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type)))
401
+
402
+ # --------------------------------------------------------------------
403
+ # As with sizers (see above), we have a number of common encoder
404
+ # implementations.
405
+
406
+
407
+ def _SimpleEncoder(wire_type, encode_value, compute_value_size):
408
+ """Return a constructor for an encoder for fields of a particular type.
409
+
410
+ Args:
411
+ wire_type: The field's wire type, for encoding tags.
412
+ encode_value: A function which encodes an individual value, e.g.
413
+ _EncodeVarint().
414
+ compute_value_size: A function which computes the size of an individual
415
+ value, e.g. _VarintSize().
416
+ """
417
+
418
+ def SpecificEncoder(field_number, is_repeated, is_packed):
419
+ if is_packed:
420
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
421
+ local_EncodeVarint = _EncodeVarint
422
+ def EncodePackedField(write, value, deterministic):
423
+ write(tag_bytes)
424
+ size = 0
425
+ for element in value:
426
+ size += compute_value_size(element)
427
+ local_EncodeVarint(write, size, deterministic)
428
+ for element in value:
429
+ encode_value(write, element, deterministic)
430
+ return EncodePackedField
431
+ elif is_repeated:
432
+ tag_bytes = TagBytes(field_number, wire_type)
433
+ def EncodeRepeatedField(write, value, deterministic):
434
+ for element in value:
435
+ write(tag_bytes)
436
+ encode_value(write, element, deterministic)
437
+ return EncodeRepeatedField
438
+ else:
439
+ tag_bytes = TagBytes(field_number, wire_type)
440
+ def EncodeField(write, value, deterministic):
441
+ write(tag_bytes)
442
+ return encode_value(write, value, deterministic)
443
+ return EncodeField
444
+
445
+ return SpecificEncoder
446
+
447
+
448
+ def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
449
+ """Like SimpleEncoder but additionally invokes modify_value on every value
450
+ before passing it to encode_value. Usually modify_value is ZigZagEncode."""
451
+
452
+ def SpecificEncoder(field_number, is_repeated, is_packed):
453
+ if is_packed:
454
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
455
+ local_EncodeVarint = _EncodeVarint
456
+ def EncodePackedField(write, value, deterministic):
457
+ write(tag_bytes)
458
+ size = 0
459
+ for element in value:
460
+ size += compute_value_size(modify_value(element))
461
+ local_EncodeVarint(write, size, deterministic)
462
+ for element in value:
463
+ encode_value(write, modify_value(element), deterministic)
464
+ return EncodePackedField
465
+ elif is_repeated:
466
+ tag_bytes = TagBytes(field_number, wire_type)
467
+ def EncodeRepeatedField(write, value, deterministic):
468
+ for element in value:
469
+ write(tag_bytes)
470
+ encode_value(write, modify_value(element), deterministic)
471
+ return EncodeRepeatedField
472
+ else:
473
+ tag_bytes = TagBytes(field_number, wire_type)
474
+ def EncodeField(write, value, deterministic):
475
+ write(tag_bytes)
476
+ return encode_value(write, modify_value(value), deterministic)
477
+ return EncodeField
478
+
479
+ return SpecificEncoder
480
+
481
+
482
+ def _StructPackEncoder(wire_type, format):
483
+ """Return a constructor for an encoder for a fixed-width field.
484
+
485
+ Args:
486
+ wire_type: The field's wire type, for encoding tags.
487
+ format: The format string to pass to struct.pack().
488
+ """
489
+
490
+ value_size = struct.calcsize(format)
491
+
492
+ def SpecificEncoder(field_number, is_repeated, is_packed):
493
+ local_struct_pack = struct.pack
494
+ if is_packed:
495
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
496
+ local_EncodeVarint = _EncodeVarint
497
+ def EncodePackedField(write, value, deterministic):
498
+ write(tag_bytes)
499
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
500
+ for element in value:
501
+ write(local_struct_pack(format, element))
502
+ return EncodePackedField
503
+ elif is_repeated:
504
+ tag_bytes = TagBytes(field_number, wire_type)
505
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
506
+ for element in value:
507
+ write(tag_bytes)
508
+ write(local_struct_pack(format, element))
509
+ return EncodeRepeatedField
510
+ else:
511
+ tag_bytes = TagBytes(field_number, wire_type)
512
+ def EncodeField(write, value, unused_deterministic=None):
513
+ write(tag_bytes)
514
+ return write(local_struct_pack(format, value))
515
+ return EncodeField
516
+
517
+ return SpecificEncoder
518
+
519
+
520
+ def _FloatingPointEncoder(wire_type, format):
521
+ """Return a constructor for an encoder for float fields.
522
+
523
+ This is like StructPackEncoder, but catches errors that may be due to
524
+ passing non-finite floating-point values to struct.pack, and makes a
525
+ second attempt to encode those values.
526
+
527
+ Args:
528
+ wire_type: The field's wire type, for encoding tags.
529
+ format: The format string to pass to struct.pack().
530
+ """
531
+
532
+ value_size = struct.calcsize(format)
533
+ if value_size == 4:
534
+ def EncodeNonFiniteOrRaise(write, value):
535
+ # Remember that the serialized form uses little-endian byte order.
536
+ if value == _POS_INF:
537
+ write(b'\x00\x00\x80\x7F')
538
+ elif value == _NEG_INF:
539
+ write(b'\x00\x00\x80\xFF')
540
+ elif value != value: # NaN
541
+ write(b'\x00\x00\xC0\x7F')
542
+ else:
543
+ raise
544
+ elif value_size == 8:
545
+ def EncodeNonFiniteOrRaise(write, value):
546
+ if value == _POS_INF:
547
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
548
+ elif value == _NEG_INF:
549
+ write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
550
+ elif value != value: # NaN
551
+ write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
552
+ else:
553
+ raise
554
+ else:
555
+ raise ValueError('Can\'t encode floating-point values that are '
556
+ '%d bytes long (only 4 or 8)' % value_size)
557
+
558
+ def SpecificEncoder(field_number, is_repeated, is_packed):
559
+ local_struct_pack = struct.pack
560
+ if is_packed:
561
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
562
+ local_EncodeVarint = _EncodeVarint
563
+ def EncodePackedField(write, value, deterministic):
564
+ write(tag_bytes)
565
+ local_EncodeVarint(write, len(value) * value_size, deterministic)
566
+ for element in value:
567
+ # This try/except block is going to be faster than any code that
568
+ # we could write to check whether element is finite.
569
+ try:
570
+ write(local_struct_pack(format, element))
571
+ except SystemError:
572
+ EncodeNonFiniteOrRaise(write, element)
573
+ return EncodePackedField
574
+ elif is_repeated:
575
+ tag_bytes = TagBytes(field_number, wire_type)
576
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
577
+ for element in value:
578
+ write(tag_bytes)
579
+ try:
580
+ write(local_struct_pack(format, element))
581
+ except SystemError:
582
+ EncodeNonFiniteOrRaise(write, element)
583
+ return EncodeRepeatedField
584
+ else:
585
+ tag_bytes = TagBytes(field_number, wire_type)
586
+ def EncodeField(write, value, unused_deterministic=None):
587
+ write(tag_bytes)
588
+ try:
589
+ write(local_struct_pack(format, value))
590
+ except SystemError:
591
+ EncodeNonFiniteOrRaise(write, value)
592
+ return EncodeField
593
+
594
+ return SpecificEncoder
595
+
596
+
597
+ # ====================================================================
598
+ # Here we declare an encoder constructor for each field type. These work
599
+ # very similarly to sizer constructors, described earlier.
600
+
601
+
602
+ Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
603
+ wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
604
+
605
+ UInt32Encoder = UInt64Encoder = _SimpleEncoder(
606
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
607
+
608
+ SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
609
+ wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
610
+ wire_format.ZigZagEncode)
611
+
612
+ # Note that Python conveniently guarantees that when using the '<' prefix on
613
+ # formats, they will also have the same size across all platforms (as opposed
614
+ # to without the prefix, where their sizes depend on the C compiler's basic
615
+ # type sizes).
616
+ Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
617
+ Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
618
+ SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
619
+ SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
620
+ FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
621
+ DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
622
+
623
+
624
+ def BoolEncoder(field_number, is_repeated, is_packed):
625
+ """Returns an encoder for a boolean field."""
626
+
627
+ false_byte = b'\x00'
628
+ true_byte = b'\x01'
629
+ if is_packed:
630
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
631
+ local_EncodeVarint = _EncodeVarint
632
+ def EncodePackedField(write, value, deterministic):
633
+ write(tag_bytes)
634
+ local_EncodeVarint(write, len(value), deterministic)
635
+ for element in value:
636
+ if element:
637
+ write(true_byte)
638
+ else:
639
+ write(false_byte)
640
+ return EncodePackedField
641
+ elif is_repeated:
642
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
643
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
644
+ for element in value:
645
+ write(tag_bytes)
646
+ if element:
647
+ write(true_byte)
648
+ else:
649
+ write(false_byte)
650
+ return EncodeRepeatedField
651
+ else:
652
+ tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
653
+ def EncodeField(write, value, unused_deterministic=None):
654
+ write(tag_bytes)
655
+ if value:
656
+ return write(true_byte)
657
+ return write(false_byte)
658
+ return EncodeField
659
+
660
+
661
+ def StringEncoder(field_number, is_repeated, is_packed):
662
+ """Returns an encoder for a string field."""
663
+
664
+ tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
665
+ local_EncodeVarint = _EncodeVarint
666
+ local_len = len
667
+ assert not is_packed
668
+ if is_repeated:
669
+ def EncodeRepeatedField(write, value, deterministic):
670
+ for element in value:
671
+ encoded = element.encode('utf-8')
672
+ write(tag)
673
+ local_EncodeVarint(write, local_len(encoded), deterministic)
674
+ write(encoded)
675
+ return EncodeRepeatedField
676
+ else:
677
+ def EncodeField(write, value, deterministic):
678
+ encoded = value.encode('utf-8')
679
+ write(tag)
680
+ local_EncodeVarint(write, local_len(encoded), deterministic)
681
+ return write(encoded)
682
+ return EncodeField
683
+
684
+
685
+ def BytesEncoder(field_number, is_repeated, is_packed):
686
+ """Returns an encoder for a bytes field."""
687
+
688
+ tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
689
+ local_EncodeVarint = _EncodeVarint
690
+ local_len = len
691
+ assert not is_packed
692
+ if is_repeated:
693
+ def EncodeRepeatedField(write, value, deterministic):
694
+ for element in value:
695
+ write(tag)
696
+ local_EncodeVarint(write, local_len(element), deterministic)
697
+ write(element)
698
+ return EncodeRepeatedField
699
+ else:
700
+ def EncodeField(write, value, deterministic):
701
+ write(tag)
702
+ local_EncodeVarint(write, local_len(value), deterministic)
703
+ return write(value)
704
+ return EncodeField
705
+
706
+
707
+ def GroupEncoder(field_number, is_repeated, is_packed):
708
+ """Returns an encoder for a group field."""
709
+
710
+ start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
711
+ end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
712
+ assert not is_packed
713
+ if is_repeated:
714
+ def EncodeRepeatedField(write, value, deterministic):
715
+ for element in value:
716
+ write(start_tag)
717
+ element._InternalSerialize(write, deterministic)
718
+ write(end_tag)
719
+ return EncodeRepeatedField
720
+ else:
721
+ def EncodeField(write, value, deterministic):
722
+ write(start_tag)
723
+ value._InternalSerialize(write, deterministic)
724
+ return write(end_tag)
725
+ return EncodeField
726
+
727
+
728
+ def MessageEncoder(field_number, is_repeated, is_packed):
729
+ """Returns an encoder for a message field."""
730
+
731
+ tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
732
+ local_EncodeVarint = _EncodeVarint
733
+ assert not is_packed
734
+ if is_repeated:
735
+ def EncodeRepeatedField(write, value, deterministic):
736
+ for element in value:
737
+ write(tag)
738
+ local_EncodeVarint(write, element.ByteSize(), deterministic)
739
+ element._InternalSerialize(write, deterministic)
740
+ return EncodeRepeatedField
741
+ else:
742
+ def EncodeField(write, value, deterministic):
743
+ write(tag)
744
+ local_EncodeVarint(write, value.ByteSize(), deterministic)
745
+ return value._InternalSerialize(write, deterministic)
746
+ return EncodeField
747
+
748
+
749
+ # --------------------------------------------------------------------
750
+ # As before, MessageSet is special.
751
+
752
+
753
+ def MessageSetItemEncoder(field_number):
754
+ """Encoder for extensions of MessageSet.
755
+
756
+ The message set message looks like this:
757
+ message MessageSet {
758
+ repeated group Item = 1 {
759
+ required int32 type_id = 2;
760
+ required string message = 3;
761
+ }
762
+ }
763
+ """
764
+ start_bytes = b"".join([
765
+ TagBytes(1, wire_format.WIRETYPE_START_GROUP),
766
+ TagBytes(2, wire_format.WIRETYPE_VARINT),
767
+ _VarintBytes(field_number),
768
+ TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
769
+ end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
770
+ local_EncodeVarint = _EncodeVarint
771
+
772
+ def EncodeField(write, value, deterministic):
773
+ write(start_bytes)
774
+ local_EncodeVarint(write, value.ByteSize(), deterministic)
775
+ value._InternalSerialize(write, deterministic)
776
+ return write(end_bytes)
777
+
778
+ return EncodeField
779
+
780
+
781
+ # --------------------------------------------------------------------
782
+ # As before, Map is special.
783
+
784
+
785
+ def MapEncoder(field_descriptor):
786
+ """Encoder for extensions of MessageSet.
787
+
788
+ Maps always have a wire format like this:
789
+ message MapEntry {
790
+ key_type key = 1;
791
+ value_type value = 2;
792
+ }
793
+ repeated MapEntry map = N;
794
+ """
795
+ # Can't look at field_descriptor.message_type._concrete_class because it may
796
+ # not have been initialized yet.
797
+ message_type = field_descriptor.message_type
798
+ encode_message = MessageEncoder(field_descriptor.number, False, False)
799
+
800
+ def EncodeField(write, value, deterministic):
801
+ value_keys = sorted(value.keys()) if deterministic else value
802
+ for key in value_keys:
803
+ entry_msg = message_type._concrete_class(key=key, value=value[key])
804
+ encode_message(write, entry_msg, deterministic)
805
+
806
+ return EncodeField
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/enum_type_wrapper.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A simple wrapper around enum types to expose utility functions.
9
+
10
+ Instances are created as properties with the same name as the enum they wrap
11
+ on proto classes. For usage, see:
12
+ reflection_test.py
13
+ """
14
+
15
+ import sys
16
+
17
+ __author__ = 'rabsatt@google.com (Kevin Rabsatt)'
18
+
19
+
20
+ class EnumTypeWrapper(object):
21
+ """A utility for finding the names of enum values."""
22
+
23
+ DESCRIPTOR = None
24
+
25
+ # This is a type alias, which mypy typing stubs can type as
26
+ # a genericized parameter constrained to an int, allowing subclasses
27
+ # to be typed with more constraint in .pyi stubs
28
+ # Eg.
29
+ # def MyGeneratedEnum(Message):
30
+ # ValueType = NewType('ValueType', int)
31
+ # def Name(self, number: MyGeneratedEnum.ValueType) -> str
32
+ ValueType = int
33
+
34
+ def __init__(self, enum_type):
35
+ """Inits EnumTypeWrapper with an EnumDescriptor."""
36
+ self._enum_type = enum_type
37
+ self.DESCRIPTOR = enum_type # pylint: disable=invalid-name
38
+
39
+ def Name(self, number): # pylint: disable=invalid-name
40
+ """Returns a string containing the name of an enum value."""
41
+ try:
42
+ return self._enum_type.values_by_number[number].name
43
+ except KeyError:
44
+ pass # fall out to break exception chaining
45
+
46
+ if not isinstance(number, int):
47
+ raise TypeError(
48
+ 'Enum value for {} must be an int, but got {} {!r}.'.format(
49
+ self._enum_type.name, type(number), number))
50
+ else:
51
+ # repr here to handle the odd case when you pass in a boolean.
52
+ raise ValueError('Enum {} has no name defined for value {!r}'.format(
53
+ self._enum_type.name, number))
54
+
55
+ def Value(self, name): # pylint: disable=invalid-name
56
+ """Returns the value corresponding to the given enum name."""
57
+ try:
58
+ return self._enum_type.values_by_name[name].number
59
+ except KeyError:
60
+ pass # fall out to break exception chaining
61
+ raise ValueError('Enum {} has no value defined for name {!r}'.format(
62
+ self._enum_type.name, name))
63
+
64
+ def keys(self):
65
+ """Return a list of the string names in the enum.
66
+
67
+ Returns:
68
+ A list of strs, in the order they were defined in the .proto file.
69
+ """
70
+
71
+ return [value_descriptor.name
72
+ for value_descriptor in self._enum_type.values]
73
+
74
+ def values(self):
75
+ """Return a list of the integer values in the enum.
76
+
77
+ Returns:
78
+ A list of ints, in the order they were defined in the .proto file.
79
+ """
80
+
81
+ return [value_descriptor.number
82
+ for value_descriptor in self._enum_type.values]
83
+
84
+ def items(self):
85
+ """Return a list of the (name, value) pairs of the enum.
86
+
87
+ Returns:
88
+ A list of (str, int) pairs, in the order they were defined
89
+ in the .proto file.
90
+ """
91
+ return [(value_descriptor.name, value_descriptor.number)
92
+ for value_descriptor in self._enum_type.values]
93
+
94
+ def __getattr__(self, name):
95
+ """Returns the value corresponding to the given enum name."""
96
+ try:
97
+ return super(
98
+ EnumTypeWrapper,
99
+ self).__getattribute__('_enum_type').values_by_name[name].number
100
+ except KeyError:
101
+ pass # fall out to break exception chaining
102
+ raise AttributeError('Enum {} has no value defined for name {!r}'.format(
103
+ self._enum_type.name, name))
104
+
105
+ def __or__(self, other):
106
+ """Returns the union type of self and other."""
107
+ if sys.version_info >= (3, 10):
108
+ return type(self) | other
109
+ else:
110
+ raise NotImplementedError(
111
+ 'You may not use | on EnumTypes (or classes) below python 3.10'
112
+ )
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/extension_dict.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains _ExtensionDict class to represent extensions.
9
+ """
10
+
11
+ from google.protobuf.internal import type_checkers
12
+ from google.protobuf.descriptor import FieldDescriptor
13
+
14
+
15
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid.

  Raises KeyError (matching the C++ runtime's behavior) when the handle is
  not a FieldDescriptor, is not an extension, or does not extend *message*.
  """
  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  full_name = extension_handle.full_name
  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % full_name)

  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
35
+
36
+
37
+ # TODO: Unify error handling of "unknown extension" crap.
38
+ # TODO: Support iteritems()-style iteration over all
39
+ # extensions with the "has" bits turned on?
40
class _ExtensionDict(object):

  """Dict-like container for Extension fields on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    self._extended_message = extended_message

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    # Fast path: the extension is already populated in the message's fields.
    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
      return result

    if extension_handle.is_repeated:
      # Repeated extension: build an (empty) container linked to the message.
      result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      message_type = extension_handle.message_type
      if not hasattr(message_type, '_concrete_class'):
        # pylint: disable=g-import-not-at-top
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      # NOTE(review): this second check appears to repeat the one above
      # (message_type IS extension_handle.message_type); it looks redundant
      # but is harmless -- confirm against upstream before removing.
      if not hasattr(extension_handle.message_type, '_concrete_class'):
        from google.protobuf import message_factory
        message_factory.GetMessageClass(extension_handle.message_type)
      result = extension_handle.message_type._concrete_class()
      try:
        # Wire the sub-message to the parent so mutations propagate "has" bits.
        result._SetListener(self._extended_message._listener_for_children)
      except ReferenceError:
        # Parent already garbage-collected (weakref listener); ignore.
        pass
    else:
      # Singular scalar -- just return the default without inserting into the
      # dict.
      return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created. If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic. This is true
    # in CPython but we haven't investigated others. This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result

  def __eq__(self, other):
    # Equal iff both wrap messages whose populated *extension* fields match.
    if not isinstance(other, self.__class__):
      return False

    my_fields = self._extended_message.ListFields()
    other_fields = other._extended_message.ListFields()

    # Get rid of non-extension fields.
    # NOTE(review): __len__ below treats ListFields() entries as
    # (descriptor, value) pairs (field[0].is_extension) while this filter
    # uses field.is_extension on the entry itself -- verify which shape
    # ListFields() actually returns.
    my_fields = [field for field in my_fields if field.is_extension]
    other_fields = [field for field in other_fields if field.is_extension]

    return my_fields == other_fields

  def __ne__(self, other):
    return not self == other

  def __len__(self):
    # Number of populated extension fields on the wrapped message.
    fields = self._extended_message.ListFields()
    # Get rid of non-extension fields.
    extension_fields = [field for field in fields if field[0].is_extension]
    return len(extension_fields)

  def __hash__(self):
    raise TypeError('unhashable object')

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields. Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.

    Raises:
      TypeError: if the handle names a repeated or composite extension.
    """

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.is_repeated or
        extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def __delitem__(self, extension_handle):
    # Deleting is defined as clearing the extension on the message.
    self._extended_message.ClearExtension(extension_handle)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    descriptor = self._extended_message.DESCRIPTOR
    extensions = descriptor.file.pool._extensions_by_name[descriptor]
    return extensions.get(name, None)

  def _FindExtensionByNumber(self, number):
    """Tries to find a known extension with the field number.

    Args:
      number: Extension field number.

    Returns:
      Extension field descriptor.
    """
    descriptor = self._extended_message.DESCRIPTOR
    extensions = descriptor.file.pool._extensions_by_number[descriptor]
    return extensions.get(number, None)

  def __iter__(self):
    # Return a generator over the populated extension fields
    return (f[0] for f in self._extended_message.ListFields()
            if f[0].is_extension)

  def __contains__(self, extension_handle):
    """True if the extension is "set" in the has-bit / presence sense."""
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if extension_handle not in self._extended_message._fields:
      return False

    if extension_handle.is_repeated:
      # Repeated extensions count as present only when non-empty.
      return bool(self._extended_message._fields.get(extension_handle))

    if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      value = self._extended_message._fields.get(extension_handle)
      # pylint: disable=protected-access
      return value is not None and value._is_present_in_parent

    return True
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/field_mask.py ADDED
@@ -0,0 +1,312 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains FieldMask class."""
9
+
10
+ from google.protobuf.descriptor import FieldDescriptor
11
+
12
+
13
class FieldMask(object):
  """Mixin providing FieldMask well-known-type helpers.

  Mixed into the generated google.protobuf.FieldMask message class, so
  `self.paths` and `self.Clear()` come from the message implementation.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      self.paths.extend(
          _CamelCaseToSnakeCase(path) for path in value.split(','))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(_IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    self.paths.extend(field.name for field in message_descriptor.fields)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    merged = _FieldMaskTree(mask1)
    merged.MergeFromFieldMask(mask2)
    merged.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
          field if False.
      replace_repeated_field: Replace repeated field if True. Append
          elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
95
+
96
+ def _IsValidPath(message_descriptor, path):
97
+ """Checks whether the path is valid for Message Descriptor."""
98
+ parts = path.split('.')
99
+ last = parts.pop()
100
+ for name in parts:
101
+ field = message_descriptor.fields_by_name.get(name)
102
+ if (field is None or
103
+ field.is_repeated or
104
+ field.type != FieldDescriptor.TYPE_MESSAGE):
105
+ return False
106
+ message_descriptor = field.message_type
107
+ return last in message_descriptor.fields_by_name
108
+
109
+
110
+ def _CheckFieldMaskMessage(message):
111
+ """Raises ValueError if message is not a FieldMask."""
112
+ message_descriptor = message.DESCRIPTOR
113
+ if (message_descriptor.name != 'FieldMask' or
114
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
115
+ raise ValueError('Message {0} is not a FieldMask.'.format(
116
+ message_descriptor.full_name))
117
+
118
+
119
+ def _SnakeCaseToCamelCase(path_name):
120
+ """Converts a path name from snake_case to camelCase."""
121
+ result = []
122
+ after_underscore = False
123
+ for c in path_name:
124
+ if c.isupper():
125
+ raise ValueError(
126
+ 'Fail to print FieldMask to Json string: Path name '
127
+ '{0} must not contain uppercase letters.'.format(path_name))
128
+ if after_underscore:
129
+ if c.islower():
130
+ result.append(c.upper())
131
+ after_underscore = False
132
+ else:
133
+ raise ValueError(
134
+ 'Fail to print FieldMask to Json string: The '
135
+ 'character after a "_" must be a lowercase letter '
136
+ 'in path name {0}.'.format(path_name))
137
+ elif c == '_':
138
+ after_underscore = True
139
+ else:
140
+ result += c
141
+
142
+ if after_underscore:
143
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
144
+ 'in path name {0}.'.format(path_name))
145
+ return ''.join(result)
146
+
147
+
148
+ def _CamelCaseToSnakeCase(path_name):
149
+ """Converts a field name from camelCase to snake_case."""
150
+ result = []
151
+ for c in path_name:
152
+ if c == '_':
153
+ raise ValueError('Fail to parse FieldMask: Path name '
154
+ '{0} must not contain "_"s.'.format(path_name))
155
+ if c.isupper():
156
+ result += '_'
157
+ result += c.lower()
158
+ else:
159
+ result += c
160
+ return ''.join(result)
161
+
162
+
163
+ class _FieldMaskTree(object):
164
+ """Represents a FieldMask in a tree structure.
165
+
166
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
167
+ the FieldMaskTree will be:
168
+ [_root] -+- foo -+- bar
169
+ | |
170
+ | +- baz
171
+ |
172
+ +- bar --- baz
173
+ In the tree, each leaf node represents a field path.
174
+ """
175
+
176
+ __slots__ = ('_root',)
177
+
178
+ def __init__(self, field_mask=None):
179
+ """Initializes the tree by FieldMask."""
180
+ self._root = {}
181
+ if field_mask:
182
+ self.MergeFromFieldMask(field_mask)
183
+
184
+ def MergeFromFieldMask(self, field_mask):
185
+ """Merges a FieldMask to the tree."""
186
+ for path in field_mask.paths:
187
+ self.AddPath(path)
188
+
189
+ def AddPath(self, path):
190
+ """Adds a field path into the tree.
191
+
192
+ If the field path to add is a sub-path of an existing field path
193
+ in the tree (i.e., a leaf node), it means the tree already matches
194
+ the given path so nothing will be added to the tree. If the path
195
+ matches an existing non-leaf node in the tree, that non-leaf node
196
+ will be turned into a leaf node with all its children removed because
197
+ the path matches all the node's children. Otherwise, a new path will
198
+ be added.
199
+
200
+ Args:
201
+ path: The field path to add.
202
+ """
203
+ node = self._root
204
+ for name in path.split('.'):
205
+ if name not in node:
206
+ node[name] = {}
207
+ elif not node[name]:
208
+ # Pre-existing empty node implies we already have this entire tree.
209
+ return
210
+ node = node[name]
211
+ # Remove any sub-trees we might have had.
212
+ node.clear()
213
+
214
+ def ToFieldMask(self, field_mask):
215
+ """Converts the tree to a FieldMask."""
216
+ field_mask.Clear()
217
+ _AddFieldPaths(self._root, '', field_mask)
218
+
219
+ def IntersectPath(self, path, intersection):
220
+ """Calculates the intersection part of a field path with this tree.
221
+
222
+ Args:
223
+ path: The field path to calculates.
224
+ intersection: The out tree to record the intersection part.
225
+ """
226
+ node = self._root
227
+ for name in path.split('.'):
228
+ if name not in node:
229
+ return
230
+ elif not node[name]:
231
+ intersection.AddPath(path)
232
+ return
233
+ node = node[name]
234
+ intersection.AddLeafNodes(path, node)
235
+
236
+ def AddLeafNodes(self, prefix, node):
237
+ """Adds leaf nodes begin with prefix to this tree."""
238
+ if not node:
239
+ self.AddPath(prefix)
240
+ for name in node:
241
+ child_path = prefix + '.' + name
242
+ self.AddLeafNodes(child_path, node[name])
243
+
244
+ def MergeMessage(
245
+ self, source, destination,
246
+ replace_message, replace_repeated):
247
+ """Merge all fields specified by this tree from source to destination."""
248
+ _MergeMessage(
249
+ self._root, source, destination, replace_message, replace_repeated)
250
+
251
+
252
+ def _StrConvert(value):
253
+ """Converts value to str if it is not."""
254
+ # This file is imported by c extension and some methods like ClearField
255
+ # requires string for the field name. py2/py3 has different text
256
+ # type and may use unicode.
257
+ if not isinstance(value, str):
258
+ return value.encode('utf-8')
259
+ return value
260
+
261
+
262
def _MergeMessage(
    node, source, destination, replace_message, replace_repeated):
  """Merge all fields specified by a sub-tree from source to destination."""
  source_descriptor = source.DESCRIPTOR
  for name in node:
    child = node[name]
    # NOTE(review): fields_by_name[name] on a missing key would raise
    # KeyError before the `field is None` check below ever fires -- verify
    # whether fields_by_name here can return None instead of raising.
    field = source_descriptor.fields_by_name[name]
    if field is None:
      raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
          name, source_descriptor.full_name))
    if child:
      # Sub-paths are only allowed for singular message fields.
      if (field.is_repeated or
          field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
        raise ValueError('Error: Field {0} in message {1} is not a singular '
                         'message field and cannot have sub-fields.'.format(
                             name, source_descriptor.full_name))
      # Recurse into the sub-message only when the source actually has it;
      # an unset source sub-message leaves the destination untouched.
      if source.HasField(name):
        _MergeMessage(
            child, getattr(source, name), getattr(destination, name),
            replace_message, replace_repeated)
      continue
    if field.is_repeated:
      # Repeated field: either replace wholesale or append (MergeFrom).
      if replace_repeated:
        destination.ClearField(_StrConvert(name))
      repeated_source = getattr(source, name)
      repeated_destination = getattr(destination, name)
      repeated_destination.MergeFrom(repeated_source)
    else:
      if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
        # Singular message field: optionally clear first, then merge.
        if replace_message:
          destination.ClearField(_StrConvert(name))
        if source.HasField(name):
          getattr(destination, name).MergeFrom(getattr(source, name))
      elif not field.has_presence or source.HasField(name):
        # Scalar with no presence tracking, or explicitly-set scalar: copy.
        setattr(destination, name, getattr(source, name))
      else:
        # Presence-tracked scalar that is unset in source: clear destination.
        destination.ClearField(_StrConvert(name))
300
+
301
+
302
+ def _AddFieldPaths(node, prefix, field_mask):
303
+ """Adds the field paths descended from node to field_mask."""
304
+ if not node and prefix:
305
+ field_mask.paths.append(prefix)
306
+ return
307
+ for name in sorted(node):
308
+ if prefix:
309
+ child_path = prefix + '.' + name
310
+ else:
311
+ child_path = name
312
+ _AddFieldPaths(node[name], child_path, field_mask)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/message_listener.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Defines a listener interface for observing certain
9
+ state transitions on Message objects.
10
+
11
+ Also defines a null implementation of this interface.
12
+ """
13
+
14
+ __author__ = 'robinson@google.com (Will Robinson)'
15
+
16
+
17
class MessageListener(object):

  """Interface for observers of message modifications. Registered on a
  message via Message._SetListener().

  Attributes:
    dirty: If True, then calling Modified() would be a no-op. This can be
      used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the parent
    message may need to be updated. This currently means either:
    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.
    Note that (a) implies (b), since new objects start out with a client cached
    size (zero). However, we document (a) explicitly because it is important.

    Modified() will *only* be called in response to one of these two events --
    not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped. Performance-
    sensitive callers should check this attribute directly before calling since
    it will be true most of the time.
    """

    # Abstract: concrete listeners must override this.
    raise NotImplementedError
48
+
49
+
50
class NullMessageListener(object):

  """MessageListener implementation that ignores all notifications."""

  def Modified(self):
    # Intentionally a no-op.
    pass
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/python_edition_defaults.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ """
2
+ This file contains the serialized FeatureSetDefaults object corresponding to
3
+ the Pure Python runtime. This is used for feature resolution under Editions.
4
+ """
5
+ _PROTOBUF_INTERNAL_PYTHON_EDITION_DEFAULTS = b"\n\027\030\204\007\"\000*\020\010\001\020\002\030\002 \003(\0010\0028\002@\001\n\027\030\347\007\"\000*\020\010\002\020\001\030\001 \002(\0010\0018\002@\001\n\027\030\350\007\"\014\010\001\020\001\030\001 \002(\0010\001*\0048\002@\001\n\027\030\351\007\"\020\010\001\020\001\030\001 \002(\0010\0018\001@\002*\000 \346\007(\351\007"
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/python_message.py ADDED
@@ -0,0 +1,1591 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # This code is meant to work on Python 2.4 and above only.
9
+ #
10
+ # TODO: Helpers for verbose, common checks like seeing if a
11
+ # descriptor's cpp_type is CPPTYPE_MESSAGE.
12
+
13
+ """Contains a metaclass and helper functions used to create
14
+ protocol message classes from Descriptor objects at runtime.
15
+
16
+ Recall that a metaclass is the "type" of a class.
17
+ (A class is to a metaclass what an instance is to a class.)
18
+
19
+ In this case, we use the GeneratedProtocolMessageType metaclass
20
+ to inject all the useful functionality into the classes
21
+ output by the protocol compiler at compile-time.
22
+
23
+ The upshot of all this is that the real implementation
24
+ details for ALL pure-Python protocol buffers are *here in
25
+ this file*.
26
+ """
27
+
28
+ __author__ = 'robinson@google.com (Will Robinson)'
29
+
30
+ import datetime
31
+ from io import BytesIO
32
+ import math
33
+ import struct
34
+ import sys
35
+ import warnings
36
+ import weakref
37
+
38
+ from google.protobuf import descriptor as descriptor_mod
39
+ from google.protobuf import message as message_mod
40
+ from google.protobuf import text_format
41
+ # We use "as" to avoid name collisions with variables.
42
+ from google.protobuf.internal import api_implementation
43
+ from google.protobuf.internal import containers
44
+ from google.protobuf.internal import decoder
45
+ from google.protobuf.internal import encoder
46
+ from google.protobuf.internal import enum_type_wrapper
47
+ from google.protobuf.internal import extension_dict
48
+ from google.protobuf.internal import message_listener as message_listener_mod
49
+ from google.protobuf.internal import type_checkers
50
+ from google.protobuf.internal import well_known_types
51
+ from google.protobuf.internal import wire_format
52
+
53
+ _FieldDescriptor = descriptor_mod.FieldDescriptor
54
+ _AnyFullTypeName = 'google.protobuf.Any'
55
+ _StructFullTypeName = 'google.protobuf.Struct'
56
+ _ListValueFullTypeName = 'google.protobuf.ListValue'
57
+ _ExtensionDict = extension_dict._ExtensionDict
58
+
59
class GeneratedProtocolMessageType(type):

  """Metaclass for protocol message classes created at runtime from Descriptors.

  We add implementations for all methods described in the Message class. We
  also create properties to allow getting/setting all fields in the protocol
  message. Finally, we create slots to prevent users from accidentally
  "setting" nonexistent fields in the protocol message, which then wouldn't get
  serialized / deserialized properly.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime. Clients can also manually create their own
  classes at runtime, as in this example:

  mydescriptor = Descriptor(.....)
  factory = symbol_database.Default()
  factory.pool.AddDescriptor(mydescriptor)
  MyProtoClass = message_factory.GetMessageClass(mydescriptor)
  myproto_instance = MyProtoClass()
  myproto.foo_field = 23
  ...
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __new__(cls, name, bases, dictionary):
    """Custom allocation for runtime-generated class types.

    We override __new__ because this is apparently the only place
    where we can meaningfully set __slots__ on the class we're creating(?).
    (The interplay between metaclasses and slots is not very well-documented).

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.

    Returns:
      Newly-allocated class.

    Raises:
      RuntimeError: Generated code only work with python cpp extension.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    # A str here means the generated _pb2 module expected the C++ descriptor
    # implementation but is running under the pure-Python one.
    if isinstance(descriptor, str):
      raise RuntimeError('The generated code only work with python cpp '
                         'extension, but it is using pure python runtime.')

    # If a concrete class already exists for this descriptor, don't try to
    # create another.  Doing so will break any messages that already exist with
    # the existing class.
    #
    # The C++ implementation appears to have its own internal `PyMessageFactory`
    # to achieve similar results.
    #
    # This most commonly happens in `text_format.py` when using descriptors from
    # a custom pool; it calls message_factory.GetMessageClass() on a
    # descriptor which already has an existing concrete class.
    new_class = getattr(descriptor, '_concrete_class', None)
    if new_class:
      return new_class

    # Well-known types get an extra base class providing their mixin helpers
    # (e.g. Timestamp arithmetic, FieldMask JSON conversion).
    if descriptor.full_name in well_known_types.WKTBASES:
      bases += (well_known_types.WKTBASES[descriptor.full_name],)
    # _AddClassAttributesForNestedExtensions and _AddSlots are module-level
    # helpers defined later in this file.
    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
    _AddSlots(descriptor, dictionary)

    superclass = super(GeneratedProtocolMessageType, cls)
    new_class = superclass.__new__(cls, name, bases, dictionary)
    return new_class

  def __init__(cls, name, bases, dictionary):
    """Here we perform the majority of our work on the class.
    We add enum getters, an __init__ method, implementations
    of all Message methods, and properties for all fields
    in the protocol type.

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    # If this is an _existing_ class looked up via `_concrete_class` in the
    # __new__ method above, then we don't need to re-initialize anything.
    existing_class = getattr(descriptor, '_concrete_class', None)
    if existing_class:
      assert existing_class is cls, (
          'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
          % (descriptor.full_name))
      return

    # Per-class decoder/field lookup tables used during wire parsing.
    cls._message_set_decoders_by_tag = {}
    cls._fields_by_tag = {}
    if (descriptor.has_options and
        descriptor.GetOptions().message_set_wire_format):
      cls._message_set_decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
          decoder.MessageSetItemDecoder(descriptor),
          None,
      )

    # Attach stuff to each FieldDescriptor for quick lookup later on.
    for field in descriptor.fields:
      _AttachFieldHelpers(cls, field)

    if descriptor.is_extendable and hasattr(descriptor.file, 'pool'):
      extensions = descriptor.file.pool.FindAllExtensions(descriptor)
      for ext in extensions:
        _AttachFieldHelpers(cls, ext)

    # Record the concrete class on the descriptor so __new__ can find it,
    # then install all generated message machinery (helpers defined later
    # in this file).
    descriptor._concrete_class = cls  # pylint: disable=protected-access
    _AddEnumValues(descriptor, cls)
    _AddInitMethod(descriptor, cls)
    _AddPropertiesForFields(descriptor, cls)
    _AddPropertiesForExtensions(descriptor, cls)
    _AddStaticMethods(cls)
    _AddMessageMethods(descriptor, cls)
    _AddPrivateHelperMethods(descriptor, cls)

    superclass = super(GeneratedProtocolMessageType, cls)
    superclass.__init__(name, bases, dictionary)
196
+
197
+
198
+ # Stateless helpers for GeneratedProtocolMessageType below.
199
+ # Outside clients should not access these directly.
200
+ #
201
+ # I opted not to make any of these methods on the metaclass, to make it more
202
+ # clear that I'm not really using any state there and to keep clients from
203
+ # thinking that they have direct access to these construction helpers.
204
+
205
+
206
+ def _PropertyName(proto_field_name):
207
+ """Returns the name of the public property attribute which
208
+ clients can use to get and (in some cases) set the value
209
+ of a protocol message field.
210
+
211
+ Args:
212
+ proto_field_name: The protocol message field name, exactly
213
+ as it appears (or would appear) in a .proto file.
214
+ """
215
+ # TODO: Escape Python keywords (e.g., yield), and test this support.
216
+ # nnorwitz makes my day by writing:
217
+ # """
218
+ # FYI. See the keyword module in the stdlib. This could be as simple as:
219
+ #
220
+ # if keyword.iskeyword(proto_field_name):
221
+ # return proto_field_name + "_"
222
+ # return proto_field_name
223
+ # """
224
+ # Kenton says: The above is a BAD IDEA. People rely on being able to use
225
+ # getattr() and setattr() to reflectively manipulate field values. If we
226
+ # rename the properties, then every such user has to also make sure to apply
227
+ # the same transformation. Note that currently if you name a field "yield",
228
+ # you can still access it just fine using getattr/setattr -- it's not even
229
+ # that cumbersome to do so.
230
+ # TODO: Remove this method entirely if/when everyone agrees with my
231
+ # position.
232
+ return proto_field_name
233
+
234
+
235
+ def _AddSlots(message_descriptor, dictionary):
236
+ """Adds a __slots__ entry to dictionary, containing the names of all valid
237
+ attributes for this message type.
238
+
239
+ Args:
240
+ message_descriptor: A Descriptor instance describing this message type.
241
+ dictionary: Class dictionary to which we'll add a '__slots__' entry.
242
+ """
243
+ dictionary['__slots__'] = ['_cached_byte_size',
244
+ '_cached_byte_size_dirty',
245
+ '_fields',
246
+ '_unknown_fields',
247
+ '_is_present_in_parent',
248
+ '_listener',
249
+ '_listener_for_children',
250
+ '__weakref__',
251
+ '_oneofs']
252
+
253
+
254
def _IsMessageSetExtension(field):
  """Returns True iff this is a singular message extension of a MessageSet.

  A MessageSet extension is encoded with a special item wire format, so the
  encoder/sizer selection needs to identify these fields precisely.
  """
  if not field.is_extension:
    return False
  container = field.containing_type
  if not (container.has_options and
          container.GetOptions().message_set_wire_format):
    return False
  return (field.type == _FieldDescriptor.TYPE_MESSAGE and
          not field.is_required and
          not field.is_repeated)
261
+
262
+
263
def _IsMapField(field):
  """Returns True iff this field is a proto map.

  A map<K, V> field is compiled as a repeated message field whose entry
  type is flagged internally as a map entry.
  """
  if field.type != _FieldDescriptor.TYPE_MESSAGE:
    return False
  return field.message_type._is_map_entry
266
+
267
+
268
def _IsMessageMapField(field):
  """Returns True iff this map field's value type is itself a message."""
  # Map entry types always declare exactly a 'key' and a 'value' field.
  value_field = field.message_type.fields_by_name['value']
  return value_field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
271
+
272
def _AttachFieldHelpers(cls, field_descriptor):
  """Attaches per-field helpers used during (de)serialization.

  Stores a default-value constructor on the FieldDescriptor itself and
  registers the field in cls._fields_by_tag keyed by its encoded tag bytes,
  so the parser can dispatch on raw wire tags.

  Args:
    cls: The generated message class being constructed.
    field_descriptor: The FieldDescriptor to attach helpers for.
  """
  field_descriptor._default_constructor = _DefaultValueConstructorForField(
      field_descriptor
  )

  def AddFieldByTag(wiretype, is_packed):
    # Key the dispatch table by the exact serialized tag bytes.
    tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
    cls._fields_by_tag[tag_bytes] = (field_descriptor, is_packed)

  AddFieldByTag(
      type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False
  )

  if field_descriptor.is_repeated and wire_format.IsTypePackable(
      field_descriptor.type
  ):
    # To support wire compatibility of adding packed = true, add a decoder for
    # packed values regardless of the field's options.
    AddFieldByTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
291
+
292
+
293
def _MaybeAddEncoder(cls, field_descriptor):
  """Lazily attaches an encoder and a sizer to a FieldDescriptor.

  No-op if the field already has an `_encoder` attribute, so the (relatively
  expensive) encoder construction happens at most once per descriptor.
  """
  if hasattr(field_descriptor, '_encoder'):
    return
  is_repeated = field_descriptor.is_repeated
  is_map_entry = _IsMapField(field_descriptor)
  is_packed = field_descriptor.is_packed

  if is_map_entry:
    # Map fields use dedicated map encoders/sizers.
    field_encoder = encoder.MapEncoder(field_descriptor)
    sizer = encoder.MapSizer(field_descriptor,
                             _IsMessageMapField(field_descriptor))
  elif _IsMessageSetExtension(field_descriptor):
    # MessageSet extensions use the special item wire format.
    field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
    sizer = encoder.MessageSetItemSizer(field_descriptor.number)
  else:
    # Ordinary fields dispatch on the proto field type.
    field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)
    sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)

  field_descriptor._sizer = sizer
  field_descriptor._encoder = field_encoder
315
+
316
+
317
def _MaybeAddDecoder(cls, field_descriptor):
  """Lazily attaches decoders to a FieldDescriptor.

  Builds `field_descriptor._decoders`, a dict mapping is_packed (bool) to the
  decoder callable for that wire representation. No-op when the attribute
  already exists, so construction happens at most once per descriptor.

  Fix: removed the dead local `oneof_descriptor`, which was assigned in
  AddDecoder but never read.
  """
  if hasattr(field_descriptor, '_decoders'):
    return

  is_repeated = field_descriptor.is_repeated
  is_map_entry = _IsMapField(field_descriptor)
  helper_decoders = {}

  def AddDecoder(is_packed):
    decode_type = field_descriptor.type
    # Open enums decode unknown values as plain int32.
    if (decode_type == _FieldDescriptor.TYPE_ENUM and
        not field_descriptor.enum_type.is_closed):
      decode_type = _FieldDescriptor.TYPE_INT32

    if is_map_entry:
      is_message_map = _IsMessageMapField(field_descriptor)

      field_decoder = decoder.MapDecoder(
          field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
          is_message_map)
    elif decode_type == _FieldDescriptor.TYPE_STRING:
      field_decoder = decoder.StringDecoder(
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)
    elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor)
    else:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          # pylint: disable=protected-access
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)

    helper_decoders[is_packed] = field_decoder

  AddDecoder(False)

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # To support wire compatibility of adding packed = true, add a decoder for
    # packed values regardless of the field's options.
    AddDecoder(True)

  field_descriptor._decoders = helper_decoders
367
+
368
+
369
+ def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
370
+ extensions = descriptor.extensions_by_name
371
+ for extension_name, extension_field in extensions.items():
372
+ assert extension_name not in dictionary
373
+ dictionary[extension_name] = extension_field
374
+
375
+
376
def _AddEnumValues(descriptor, cls):
  """Exposes nested enums as class-level attributes.

  For each nested enum type, the class gets (a) an EnumTypeWrapper attribute
  under the enum's name, which can map between names and numbers, and (b)
  one integer attribute per enum value, so `cls.VALUE_NAME` yields the
  numeric value directly.

  Args:
    descriptor: Descriptor object for this message type.
    cls: Class we're constructing for this message type.
  """
  for nested_enum in descriptor.enum_types:
    wrapper = enum_type_wrapper.EnumTypeWrapper(nested_enum)
    setattr(cls, nested_enum.name, wrapper)
    for value_descriptor in nested_enum.values:
      setattr(cls, value_descriptor.name, value_descriptor.number)
389
+
390
+
391
def _GetInitializeDefaultForMap(field):
  """Returns a factory producing the default (empty) container for a map field.

  The returned callable takes the owning message instance and builds either a
  MessageMap (message-valued maps) or a ScalarMap (scalar-valued maps) wired
  to that message's child listener.

  Raises:
    ValueError: If the field is not repeated (map entries are always
      compiled as repeated fields).
  """
  if not field.is_repeated:
    raise ValueError('map_entry set on non-repeated field %s' % (
        field.name))
  fields_by_name = field.message_type.fields_by_name
  key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])

  value_field = fields_by_name['value']
  if _IsMessageMapField(field):
    def MakeMessageMapDefault(message):
      return containers.MessageMap(
          message._listener_for_children, value_field.message_type, key_checker,
          field.message_type)
    return MakeMessageMapDefault
  else:
    value_checker = type_checkers.GetTypeChecker(value_field)
    def MakePrimitiveMapDefault(message):
      return containers.ScalarMap(
          message._listener_for_children, key_checker, value_checker,
          field.message_type)
    return MakePrimitiveMapDefault
412
+
413
def _DefaultValueConstructorForField(field):
  """Returns a function which returns a default value for a field.

  Args:
    field: FieldDescriptor object for this field.

  The returned function has one argument:
    message: Message instance containing this field, or a weakref proxy
      of same.

  That function in turn returns a default value for this field. The default
  value may refer back to |message| via a weak reference.

  Fix: removed the dead local `message_type` in the repeated-message branch;
  the closure reads field.message_type lazily instead (see comment below).
  """

  if _IsMapField(field):
    return _GetInitializeDefaultForMap(field)

  if field.is_repeated:
    if field.has_default_value and field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have
      # been set. (Depends on order in which we initialize the classes.)
      # The closure therefore reads field.message_type at call time.
      def MakeRepeatedMessageDefault(message):
        return containers.RepeatedCompositeFieldContainer(
            message._listener_for_children, field.message_type)
      return MakeRepeatedMessageDefault
    else:
      type_checker = type_checkers.GetTypeChecker(field)
      def MakeRepeatedScalarDefault(message):
        return containers.RepeatedScalarFieldContainer(
            message._listener_for_children, type_checker)
      return MakeRepeatedScalarDefault

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    message_type = field.message_type
    def MakeSubMessageDefault(message):
      # _concrete_class may not yet be initialized.
      if not hasattr(message_type, '_concrete_class'):
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      result = message_type._concrete_class()
      # Oneof members get a listener that also updates the oneof state.
      result._SetListener(
          _OneofListener(message, field)
          if field.containing_oneof is not None
          else message._listener_for_children)
      return result
    return MakeSubMessageDefault

  def MakeScalarDefault(message):
    # TODO: This may be broken since there may not be
    # default_value. Combine with has_default_value somehow.
    return field.default_value
  return MakeScalarDefault
469
+
470
+
471
+ def _ReraiseTypeErrorWithFieldName(message_name, field_name):
472
+ """Re-raise the currently-handled TypeError with the field name added."""
473
+ exc = sys.exc_info()[1]
474
+ if len(exc.args) == 1 and type(exc) is TypeError:
475
+ # simple TypeError; add field name to exception message
476
+ exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
477
+
478
+ # re-raise possibly-amended exception with original traceback:
479
+ raise exc.with_traceback(sys.exc_info()[2])
480
+
481
+
482
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls.

  The generated __init__ accepts keyword arguments named after proto fields
  and merges them into the fresh message: repeated/map fields accept
  iterables/dicts, submessage fields accept a Message instance or a dict of
  constructor kwargs, scalars are type-checked and set directly.
  """

  def _GetIntegerEnumValue(enum_type, value):
    """Convert a string or integer enum value to an integer.

    If the value is a string, it is converted to the enum value in
    enum_type with the same name. If the value is not a string, it's
    returned as-is. (No conversion or bounds-checking is done.)
    """
    if isinstance(value, str):
      try:
        return enum_type.values_by_name[value].number
      except KeyError:
        raise ValueError('Enum type %s: unknown label "%s"' % (
            enum_type.full_name, value))
    return value

  def init(self, **kwargs):
    # Byte-size cache starts dirty only when there is something to encode.
    self._cached_byte_size = 0
    self._cached_byte_size_dirty = len(kwargs) > 0
    self._fields = {}
    # Contains a mapping from oneof field descriptors to the descriptor
    # of the currently set field in that oneof field.
    self._oneofs = {}

    # _unknown_fields is () when empty for efficiency, and will be turned into
    # a list if fields are added.
    self._unknown_fields = ()
    self._is_present_in_parent = False
    self._listener = message_listener_mod.NullMessageListener()
    self._listener_for_children = _Listener(self)
    for field_name, field_value in kwargs.items():
      field = _GetFieldByName(message_descriptor, field_name)
      if field is None:
        raise TypeError('%s() got an unexpected keyword argument "%s"' %
                        (message_descriptor.name, field_name))
      if field_value is None:
        # field=None is the same as no field at all.
        continue
      if field.is_repeated:
        field_copy = field._default_constructor(self)
        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
          if _IsMapField(field):
            if _IsMessageMapField(field):
              # Message-valued map: entries may be dicts (kwargs for the
              # value type) or ready message instances.
              for key in field_value:
                item_value = field_value[key]
                if isinstance(item_value, dict):
                  field_copy[key].__init__(**item_value)
                else:
                  field_copy[key].MergeFrom(item_value)
            else:
              field_copy.update(field_value)
          else:
            # Repeated submessage: each element is a dict of kwargs or a
            # message to merge.
            for val in field_value:
              if isinstance(val, dict):
                field_copy.add(**val)
              else:
                field_copy.add().MergeFrom(val)
        else:  # Scalar
          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
            field_value = [_GetIntegerEnumValue(field.enum_type, val)
                           for val in field_value]
          field_copy.extend(field_value)
        self._fields[field] = field_copy
      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        field_copy = field._default_constructor(self)
        new_val = None
        if isinstance(field_value, message_mod.Message):
          new_val = field_value
        elif isinstance(field_value, dict):
          if field.message_type.full_name == _StructFullTypeName:
            # google.protobuf.Struct accepts an arbitrary dict directly.
            field_copy.Clear()
            if len(field_value) == 1 and 'fields' in field_value:
              # A single 'fields' key is ambiguous: it may be Struct content
              # or the Struct message's own 'fields' attribute. Try the
              # former, fall back to the latter.
              try:
                field_copy.update(field_value)
              except:
                # NOTE(review): bare except deliberately(?) swallows any
                # failure to treat the dict as Struct content — confirm
                # narrowing to Exception is safe before changing.
                # Fall back to init normal message field
                field_copy.Clear()
                new_val = field.message_type._concrete_class(**field_value)
            else:
              field_copy.update(field_value)
          else:
            new_val = field.message_type._concrete_class(**field_value)
        elif hasattr(field_copy, '_internal_assign'):
          # Well-known types (e.g. Timestamp/Duration wrappers) support
          # direct assignment from a Python value.
          field_copy._internal_assign(field_value)
        else:
          raise TypeError(
              'Message field {0}.{1} must be initialized with a '
              'dict or instance of same class, got {2}.'.format(
                  message_descriptor.name,
                  field_name,
                  type(field_value).__name__,
              )
          )

        # NOTE(review): `!= None` invokes the value's __ne__; `is not None`
        # would be the idiomatic identity test — confirm before changing.
        if new_val != None:
          try:
            field_copy.MergeFrom(new_val)
          except TypeError:
            _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
        self._fields[field] = field_copy
      else:
        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
        try:
          # Delegates to the generated property setter (type checks,
          # presence bookkeeping).
          setattr(self, field_name, field_value)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)

  init.__module__ = None
  init.__doc__ = None
  cls.__init__ = init
595
+
596
+
597
+ def _GetFieldByName(message_descriptor, field_name):
598
+ """Returns a field descriptor by field name.
599
+
600
+ Args:
601
+ message_descriptor: A Descriptor describing all fields in message.
602
+ field_name: The name of the field to retrieve.
603
+ Returns:
604
+ The field descriptor associated with the field name.
605
+ """
606
+ try:
607
+ return message_descriptor.fields_by_name[field_name]
608
+ except KeyError:
609
+ raise ValueError('Protocol message %s has no "%s" field.' %
610
+ (message_descriptor.name, field_name))
611
+
612
+
613
def _AddPropertiesForFields(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  for field in descriptor.fields:
    _AddPropertiesForField(field, cls)

  if descriptor.is_extendable:
    # _ExtensionDict is just an adaptor with no state so we allocate a new one
    # every time it is accessed.
    cls.Extensions = property(lambda self: _ExtensionDict(self))
622
+
623
+
624
def _AddPropertiesForField(field, cls):
  """Adds a public property for a protocol message field.
  Clients can use this property to get and (in the case
  of non-repeated scalar fields) directly set the value
  of a protocol message field.

  Also sets a <NAME>_FIELD_NUMBER class constant for the field.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # Catch it if we add other types that we should
  # handle specially here.
  assert _FieldDescriptor.MAX_CPPTYPE == 10

  constant_name = field.name.upper() + '_FIELD_NUMBER'
  setattr(cls, constant_name, field.number)

  # Dispatch on field shape: repeated, submessage, or plain scalar.
  if field.is_repeated:
    _AddPropertiesForRepeatedField(field, cls)
  elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    _AddPropertiesForNonRepeatedCompositeField(field, cls)
  else:
    _AddPropertiesForNonRepeatedScalarField(field, cls)
647
+
648
+
649
+ class _FieldProperty(property):
650
+ __slots__ = ('DESCRIPTOR',)
651
+
652
+ def __init__(self, descriptor, getter, setter, doc):
653
+ property.__init__(self, getter, setter, doc=doc)
654
+ self.DESCRIPTOR = descriptor
655
+
656
+
657
def _AddPropertiesForRepeatedField(field, cls):
  """Adds a public property for a "repeated" protocol message field. Clients
  can use this property to get the value of the field, which will be either a
  RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see
  below).

  Note that when clients add values to these containers, we perform
  type-checking in the case of repeated scalar fields, and we also set any
  necessary "has" bits as a side-effect.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created. If someone has preempted us, we
      # take that object and discard ours.
      # WARNING: We are relying on setdefault() being atomic. This is true
      # in CPython but we haven't investigated others. This warning appears
      # in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % proto_field_name)

  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
699
+
700
+
701
def _AddPropertiesForNonRepeatedScalarField(field, cls):
  """Adds a public property for a nonrepeated, scalar protocol message field.
  Clients can use this property to get and directly set the value of the field.
  Note that when the client sets the value of a field by using this property,
  all necessary "has" bits are set as a side-effect, and we also perform
  type-checking.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)
  type_checker = type_checkers.GetTypeChecker(field)
  default_value = field.default_value

  def getter(self):
    # TODO: This may be broken since there may not be
    # default_value. Combine with has_default_value somehow.
    return self._fields.get(field, default_value)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  def field_setter(self, new_value):
    # pylint: disable=protected-access
    # Testing the value for truthiness captures all of the implicit presence
    # defaults (0, 0.0, enum 0, and False), except for -0.0.
    try:
      new_value = type_checker.CheckValue(new_value)
    except TypeError as e:
      raise TypeError(
          'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
    if not field.has_presence and decoder.IsDefaultScalarValue(new_value):
      # Implicit-presence field set to its default: stored as "absent".
      self._fields.pop(field, None)
    else:
      self._fields[field] = new_value
    # Check _cached_byte_size_dirty inline to improve performance, since scalar
    # setters are called frequently.
    if not self._cached_byte_size_dirty:
      self._Modified()

  if field.containing_oneof:
    # Oneof members additionally record which member of the oneof is set.
    def setter(self, new_value):
      field_setter(self, new_value)
      self._UpdateOneofState(field)
  else:
    setter = field_setter

  setter.__module__ = None
  setter.__doc__ = 'Setter for %s.' % proto_field_name

  # Add a property to encapsulate the getter/setter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
755
+
756
+
757
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
  """Adds a public property for a nonrepeated, composite protocol message field.
  A composite field is a "group" or "message" field.

  Clients can use this property to get the value of the field, but cannot
  assign to the property directly (except for a few well-known types, which
  accept assignment from their natural Python representation — see setter).

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # TODO: Remove duplication with similar method
  # for non-repeated scalars.
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created. If someone has preempted us, we
      # take that object and discard ours.
      # WARNING: We are relying on setdefault() being atomic. This is true
      # in CPython but we haven't investigated others. This warning appears
      # in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message. Well-known types are the exception: they accept
  # assignment from datetime/timedelta/dict/iterable respectively.
  def setter(self, new_value):
    if field.message_type.full_name == 'google.protobuf.Timestamp':
      getter(self)
      self._fields[field].FromDatetime(new_value)
    elif field.message_type.full_name == 'google.protobuf.Duration':
      getter(self)
      self._fields[field].FromTimedelta(new_value)
    elif field.message_type.full_name == _StructFullTypeName:
      getter(self)
      self._fields[field].Clear()
      self._fields[field].update(new_value)
    elif field.message_type.full_name == _ListValueFullTypeName:
      getter(self)
      self._fields[field].Clear()
      self._fields[field].extend(new_value)
    else:
      raise AttributeError(
          'Assignment not allowed to composite field '
          '"%s" in protocol message object.' % proto_field_name
      )

  # Add a property to encapsulate the getter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
816
+
817
+
818
def _AddPropertiesForExtensions(descriptor, cls):
  """Adds <NAME>_FIELD_NUMBER class constants for nested extensions."""
  extensions = descriptor.extensions_by_name
  for extension_name, extension_field in extensions.items():
    constant_name = extension_name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, extension_field.number)

  # TODO: Migrate all users of these attributes to functions like
  # pool.FindExtensionByNumber(descriptor).
  if descriptor.file is not None:
    # TODO: Use cls.MESSAGE_FACTORY.pool when available.
    # NOTE(review): `pool` is assigned but never used in this function —
    # looks like leftover from the migration the TODOs describe; confirm
    # before removing.
    pool = descriptor.file.pool
831
+ def _AddStaticMethods(cls):
832
+
833
+ def RegisterExtension(_):
834
+ """no-op to keep generated code <=4.23 working with new runtimes."""
835
+ # This was originally removed in 5.26 (cl/595989309).
836
+ pass
837
+
838
+ cls.RegisterExtension = staticmethod(RegisterExtension)
839
+ def FromString(s):
840
+ message = cls()
841
+ message.MergeFromString(s)
842
+ return message
843
+ cls.FromString = staticmethod(FromString)
844
+
845
+
846
def _IsPresent(item):
  """Given a (FieldDescriptor, value) tuple from _fields, return true if the
  value should be included in the list returned by ListFields()."""
  field, value = item
  if field.is_repeated:
    # Repeated fields count as present only when non-empty.
    return bool(value)
  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # Submessages track their own presence in the parent.
    return value._is_present_in_parent
  # Any stored scalar is present by definition.
  return True
856
+
857
+
858
def _AddListFieldsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ListFields on cls."""

  def ListFields(self):
    # Report only set fields, ordered by field number to match the C++
    # implementation.
    present_items = filter(_IsPresent, self._fields.items())
    return sorted(present_items, key=lambda entry: entry[0].number)

  cls.ListFields = ListFields
867
+
868
+
869
def _AddHasFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs HasField on cls."""

  # Precompute the set of names HasField accepts: non-repeated fields with
  # explicit presence, plus oneof names.
  hassable_fields = {}
  for field in message_descriptor.fields:
    if field.is_repeated:
      continue
    # For proto3, only submessages and fields inside a oneof have presence.
    if not field.has_presence:
      continue
    hassable_fields[field.name] = field

  # Has methods are supported for oneof descriptors.
  for oneof in message_descriptor.oneofs:
    hassable_fields[oneof.name] = oneof

  def HasField(self, field_name):
    try:
      field = hassable_fields[field_name]
    except KeyError as exc:
      raise ValueError('Protocol message %s has no non-repeated field "%s" '
                       'nor has presence is not available for this field.' % (
                           message_descriptor.full_name, field_name)) from exc

    if isinstance(field, descriptor_mod.OneofDescriptor):
      # For a oneof name, check presence of whichever member is set.
      try:
        return HasField(self, self._oneofs[field].name)
      except KeyError:
        return False
    else:
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        value = self._fields.get(field)
        return value is not None and value._is_present_in_parent
      else:
        return field in self._fields

  cls.HasField = HasField
906
+
907
+
908
def _AddClearFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ClearField on cls."""
  def ClearField(self, field_name):
    try:
      field = message_descriptor.fields_by_name[field_name]
    except KeyError:
      # Not a field name; maybe a oneof name — clearing a oneof clears
      # whichever member is currently set.
      try:
        field = message_descriptor.oneofs_by_name[field_name]
        if field in self._oneofs:
          field = self._oneofs[field]
        else:
          return
      except KeyError:
        raise ValueError('Protocol message %s has no "%s" field.' %
                         (message_descriptor.name, field_name))

    if field in self._fields:
      # To match the C++ implementation, we need to invalidate iterators
      # for map fields when ClearField() happens.
      if hasattr(self._fields[field], 'InvalidateIterators'):
        self._fields[field].InvalidateIterators()

      # Note: If the field is a sub-message, its listener will still point
      # at us. That's fine, because the worst than can happen is that it
      # will call _Modified() and invalidate our byte size. Big deal.
      del self._fields[field]

      if self._oneofs.get(field.containing_oneof, None) is field:
        del self._oneofs[field.containing_oneof]

    # Always call _Modified() -- even if nothing was changed, this is
    # a mutating method, and thus calling it should cause the field to become
    # present in the parent message.
    self._Modified()

  cls.ClearField = ClearField
944
+
945
+
946
def _AddClearExtensionMethod(cls):
  """Helper for _AddMessageMethods(): installs ClearExtension on cls."""
  def ClearExtension(self, field_descriptor):
    # Validates that the handle is an extension of this message type.
    extension_dict._VerifyExtensionHandle(self, field_descriptor)

    # Similar to ClearField(), above.
    if field_descriptor in self._fields:
      del self._fields[field_descriptor]
    self._Modified()
  cls.ClearExtension = ClearExtension
956
+
957
+
958
def _AddHasExtensionMethod(cls):
  """Helper for _AddMessageMethods(): installs HasExtension on cls."""
  def HasExtension(self, field_descriptor):
    # Validates that the handle is an extension of this message type.
    extension_dict._VerifyExtensionHandle(self, field_descriptor)
    if field_descriptor.is_repeated:
      raise KeyError('"%s" is repeated.' % field_descriptor.full_name)

    if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # Submessage extensions track their own presence in the parent.
      value = self._fields.get(field_descriptor)
      return value is not None and value._is_present_in_parent
    else:
      return field_descriptor in self._fields
  cls.HasExtension = HasExtension
971
+
972
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which takes
  the target message as argument. _InternalUnpackAny method does not have
  target message type and need to find the message type in descriptor pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message, or None if the type URL is empty or the type
    cannot be found in the descriptor pool.
  """
  # TODO: Don't use the factory of generated messages.
  # To make Any work with custom factories, use the message factory of the
  # parent message.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import symbol_database
  factory = symbol_database.Default()

  type_url = msg.type_url

  if not type_url:
    return None

  # TODO: For now we just strip the hostname.  Better logic will be
  # required.
  type_name = type_url.split('/')[-1]
  descriptor = factory.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  # Unable to import message_factory at top because of circular import.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import message_factory
  message_class = message_factory.GetMessageClass(descriptor)
  message = message_class()

  # Deserialize the packed payload into the freshly created instance.
  message.ParseFromString(msg.value)
  return message
1013
+
1014
+
1015
def _AddEqualsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs __eq__ on cls."""
  def __eq__(self, other):
    # Well-known Struct/ListValue messages compare equal to plain Python
    # dicts/lists with equivalent content.
    if self.DESCRIPTOR.full_name == _ListValueFullTypeName and isinstance(
        other, list
    ):
      return self._internal_compare(other)
    if self.DESCRIPTOR.full_name == _StructFullTypeName and isinstance(
        other, dict
    ):
      return self._internal_compare(other)

    # Different message types (or non-messages) are never equal; return
    # NotImplemented so Python can try the reflected comparison.
    if (not isinstance(other, message_mod.Message) or
        other.DESCRIPTOR != self.DESCRIPTOR):
      return NotImplemented

    if self is other:
      return True

    # Any messages compare by their unpacked payloads when both unpack
    # successfully; otherwise fall through to the raw field comparison.
    if self.DESCRIPTOR.full_name == _AnyFullTypeName:
      any_a = _InternalUnpackAny(self)
      any_b = _InternalUnpackAny(other)
      if any_a and any_b:
        return any_a == any_b

    if not self.ListFields() == other.ListFields():
      return False

    # TODO: Fix UnknownFieldSet to consider MessageSet extensions,
    # then use it for the comparison.
    unknown_fields = list(self._unknown_fields)
    unknown_fields.sort()
    other_unknown_fields = list(other._unknown_fields)
    other_unknown_fields.sort()
    return unknown_fields == other_unknown_fields

  cls.__eq__ = __eq__
1052
+
1053
+
1054
def _AddStrMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a text-format __str__."""
  def __str__(self):
    return text_format.MessageToString(self)
  cls.__str__ = __str__
1059
+
1060
+
1061
def _AddReprMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs __repr__ on cls.

  Uses the same text-format rendering as __str__.
  """
  def __repr__(self):
    return text_format.MessageToString(self)
  cls.__repr__ = __repr__
1066
+
1067
+
1068
def _AddUnicodeMethod(unused_message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs Python-2-era __unicode__."""

  def __unicode__(self):
    # NOTE(review): relies on MessageToString(as_utf8=True) producing output
    # that supports .decode('utf-8') — a Python 2 legacy path; confirm before
    # relying on this under Python 3.
    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
  cls.__unicode__ = __unicode__
1074
+
1075
+
1076
+ def _AddContainsMethod(message_descriptor, cls):
1077
+
1078
+ if message_descriptor.full_name == 'google.protobuf.Struct':
1079
+ def __contains__(self, key):
1080
+ return key in self.fields
1081
+ elif message_descriptor.full_name == 'google.protobuf.ListValue':
1082
+ def __contains__(self, value):
1083
+ return value in self.items()
1084
+ else:
1085
+ def __contains__(self, field):
1086
+ return self.HasField(field)
1087
+
1088
+ cls.__contains__ = __contains__
1089
+
1090
+
1091
def _BytesForNonRepeatedElement(value, field_number, field_type):
  """Returns the number of bytes needed to serialize a non-repeated element.

  The returned byte count includes space for tag information and any
  other additional space associated with serializing value.

  Args:
    value: Value we're serializing.
    field_number: Field number of this value. (Since the field number
      is stored as part of a varint-encoded tag, this has an impact
      on the total bytes required to serialize the value).
    field_type: The type of the field. One of the TYPE_* constants
      within FieldDescriptor.

  Raises:
    message_mod.EncodeError: if field_type is not a recognized TYPE_*
      constant.
  """
  # Look the sizer up first and call it outside the except clause, so that a
  # KeyError raised *inside* the sizer is not misreported as an unrecognized
  # field type.
  try:
    fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
  except KeyError:
    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
  return fn(field_number, value)
1109
+
1110
+
1111
def _AddByteSizeMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ByteSize() on cls."""

  def ByteSize(self):
    # Fast path: reuse the cached size when nothing changed since the last
    # computation (_Modified() sets the dirty bit).
    if not self._cached_byte_size_dirty:
      return self._cached_byte_size

    size = 0
    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      size = key_field._sizer(self.key)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      size += value_field._sizer(self.value)
    else:
      # Sum the sizers of all present fields, then add raw unknown-field
      # bytes (tag + payload are stored verbatim).
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        size += field_descriptor._sizer(field_value)
      for tag_bytes, value_bytes in self._unknown_fields:
        size += len(tag_bytes) + len(value_bytes)

    # Refresh the cache and clear both our own and the children's dirty bits.
    self._cached_byte_size = size
    self._cached_byte_size_dirty = False
    self._listener_for_children.dirty = False
    return size

  cls.ByteSize = ByteSize
1141
+
1142
+
1143
def _AddSerializeToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs SerializeToString() on cls."""

  def SerializeToString(self, **kwargs):
    # Check if the message has all of its required fields set.
    if not self.IsInitialized():
      raise message_mod.EncodeError(
          'Message %s is missing required fields: %s' % (
          self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
    # Delegate the actual wire encoding to the partial serializer.
    return self.SerializePartialToString(**kwargs)
  cls.SerializeToString = SerializeToString
1154
+
1155
+
1156
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Installs SerializePartialToString() and the internal _InternalSerialize()
  on cls. "Partial" means required-field initialization is NOT checked.
  """

  def SerializePartialToString(self, **kwargs):
    out = BytesIO()
    self._InternalSerialize(out.write, **kwargs)
    return out.getvalue()
  cls.SerializePartialToString = SerializePartialToString

  def InternalSerialize(self, write_bytes, deterministic=None):
    # Resolve the deterministic flag: fall back to the process-wide default
    # when the caller did not specify one.
    if deterministic is None:
      deterministic = (
          api_implementation.IsPythonDefaultSerializationDeterministic())
    else:
      deterministic = bool(deterministic)

    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      key_field._encoder(write_bytes, self.key, deterministic)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      value_field._encoder(write_bytes, self.value, deterministic)
    else:
      # Encode each present field, then append unknown fields verbatim
      # (tag bytes and payload bytes were preserved at parse time).
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        field_descriptor._encoder(write_bytes, field_value, deterministic)
      for tag_bytes, value_bytes in self._unknown_fields:
        write_bytes(tag_bytes)
        write_bytes(value_bytes)
  cls._InternalSerialize = InternalSerialize
1189
+
1190
+
1191
def _AddMergeFromStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods().

  Installs MergeFromString() and the internal wire-format parser
  _InternalParse() on cls.
  """
  def MergeFromString(self, serialized):
    # memoryview gives zero-copy slicing over bytes/bytearray input.
    serialized = memoryview(serialized)
    length = len(serialized)
    try:
      if self._InternalParse(serialized, 0, length) != length:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise message_mod.DecodeError('Unexpected end-group tag.')
    except (IndexError, TypeError):
      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
      raise message_mod.DecodeError('Truncated message.')
    except struct.error as e:
      raise message_mod.DecodeError(e)
    return length   # Return this for legacy reasons.
  cls.MergeFromString = MergeFromString

  # Captured once per class; shared by all instances of this message type.
  fields_by_tag = cls._fields_by_tag
  message_set_decoders_by_tag = cls._message_set_decoders_by_tag

  def InternalParse(self, buffer, pos, end, current_depth=0):
    """Create a message from serialized bytes.

    Args:
      self: Message, instance of the proto message object.
      buffer: memoryview of the serialized data.
      pos: int, position to start in the serialized data.
      end: int, end position of the serialized data.
      current_depth: int, recursion depth for nested-message decoding.

    Returns:
      int, the position after the last byte consumed (== end on normal
      completion; earlier if an end-group tag was hit).
    """
    # Guard against internal misuse, since this function is called internally
    # quite extensively, and its easy to accidentally pass bytes.
    assert isinstance(buffer, memoryview)
    self._Modified()
    field_dict = self._fields
    while pos != end:
      (tag_bytes, new_pos) = decoder.ReadTag(buffer, pos)
      # MessageSet extensions get dedicated decoders; try those first.
      field_decoder, field_des = message_set_decoders_by_tag.get(
          tag_bytes, (None, None)
      )
      if field_decoder:
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        continue
      field_des, is_packed = fields_by_tag.get(tag_bytes, (None, None))
      if field_des is None:
        # Unknown field: preserve the raw tag+payload bytes for round-trip
        # serialization.
        if not self._unknown_fields:  # pylint: disable=protected-access
          self._unknown_fields = []  # pylint: disable=protected-access
        field_number, wire_type = decoder.DecodeTag(tag_bytes)
        if field_number == 0:
          raise message_mod.DecodeError('Field number 0 is illegal.')
        (data, new_pos) = decoder._DecodeUnknownField(
            buffer, new_pos, end, field_number, wire_type
        )  # pylint: disable=protected-access
        if new_pos == -1:
          # End-group tag: stop here and let the caller detect the early
          # return.
          return pos
        self._unknown_fields.append(
            (tag_bytes, buffer[pos + len(tag_bytes) : new_pos].tobytes())
        )
        pos = new_pos
      else:
        _MaybeAddDecoder(cls, field_des)
        field_decoder = field_des._decoders[is_packed]
        pos = field_decoder(
            buffer, new_pos, end, self, field_dict, current_depth
        )
        # Setting a oneof member clears its siblings.
        if field_des.containing_oneof:
          self._UpdateOneofState(field_des)
    return pos

  cls._InternalParse = InternalParse
1264
+
1265
+
1266
def _AddIsInitializedMethod(message_descriptor, cls):
  """Adds the IsInitialized and FindInitializationError methods to the
  protocol message class."""

  # Precomputed once per class: proto2 'required' fields of this message.
  required_fields = [field for field in message_descriptor.fields
                     if field.is_required]

  def IsInitialized(self, errors=None):
    """Checks if all required fields of a message are set.

    Args:
      errors: A list which, if provided, will be populated with the field
        paths of all missing required fields.

    Returns:
      True iff the specified message has all required fields set.
    """

    # Performance is critical so we avoid HasField() and ListFields().

    for field in required_fields:
      if (field not in self._fields or
          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
           not self._fields[field]._is_present_in_parent)):
        if errors is not None:
          errors.extend(self.FindInitializationErrors())
        return False

    # Recurse into present sub-messages (singular and repeated).
    for field, value in list(self._fields.items()):  # dict can change size!
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.is_repeated:
          if (field.message_type._is_map_entry):
            # Map entries cannot contain required fields; skip them.
            continue
          for element in value:
            if not element.IsInitialized():
              if errors is not None:
                errors.extend(self.FindInitializationErrors())
              return False
        elif value._is_present_in_parent and not value.IsInitialized():
          if errors is not None:
            errors.extend(self.FindInitializationErrors())
          return False

    return True

  cls.IsInitialized = IsInitialized

  def FindInitializationErrors(self):
    """Finds required fields which are not initialized.

    Returns:
      A list of strings.  Each string is a path to an uninitialized field from
      the top-level message, e.g. "foo.bar[5].baz".
    """

    errors = []  # simplify things

    for field in required_fields:
      if not self.HasField(field.name):
        errors.append(field.name)

    for field, value in self.ListFields():
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        # Extensions are rendered as "(full.name)" in error paths.
        if field.is_extension:
          name = '(%s)' % field.full_name
        else:
          name = field.name

        if _IsMapField(field):
          if _IsMessageMapField(field):
            for key in value:
              element = value[key]
              prefix = '%s[%s].' % (name, key)
              sub_errors = element.FindInitializationErrors()
              errors += [prefix + error for error in sub_errors]
          else:
            # ScalarMaps can't have any initialization errors.
            pass
        elif field.is_repeated:
          for i in range(len(value)):
            element = value[i]
            prefix = '%s[%d].' % (name, i)
            sub_errors = element.FindInitializationErrors()
            errors += [prefix + error for error in sub_errors]
        else:
          prefix = name + '.'
          sub_errors = value.FindInitializationErrors()
          errors += [prefix + error for error in sub_errors]

    return errors

  cls.FindInitializationErrors = FindInitializationErrors
1358
+
1359
+
1360
+ def _FullyQualifiedClassName(klass):
1361
+ module = klass.__module__
1362
+ name = getattr(klass, '__qualname__', klass.__name__)
1363
+ if module in (None, 'builtins', '__builtin__'):
1364
+ return name
1365
+ return module + '.' + name
1366
+
1367
+
1368
def _AddMergeFromMethod(cls):
  """Helper for _AddMessageMethods(): installs MergeFrom() on cls."""
  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE

  def MergeFrom(self, msg):
    # Only instances of exactly this generated class (or subclasses) may be
    # merged in.
    if not isinstance(msg, cls):
      raise TypeError(
          'Parameter to MergeFrom() must be instance of same class: '
          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
                                   _FullyQualifiedClassName(msg.__class__)))

    assert msg is not self
    self._Modified()

    fields = self._fields

    for field, value in msg._fields.items():
      if field.is_repeated:
        # Repeated (incl. map) fields: append/merge into our container.
        field_value = fields.get(field)
        if field_value is None:
          # Construct a new object to represent this field.
          field_value = field._default_constructor(self)
          fields[field] = field_value
        field_value.MergeFrom(value)
      elif field.cpp_type == CPPTYPE_MESSAGE:
        # Singular sub-message: merge recursively, but only if the source
        # child is actually present.
        if value._is_present_in_parent:
          field_value = fields.get(field)
          if field_value is None:
            # Construct a new object to represent this field.
            field_value = field._default_constructor(self)
            fields[field] = field_value
          field_value.MergeFrom(value)
      else:
        # Scalars: the source value simply overwrites ours.
        self._fields[field] = value
        if field.containing_oneof:
          self._UpdateOneofState(field)

    if msg._unknown_fields:
      if not self._unknown_fields:
        self._unknown_fields = []
      self._unknown_fields.extend(msg._unknown_fields)

  cls.MergeFrom = MergeFrom
1410
+
1411
+
1412
def _AddWhichOneofMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs WhichOneof() on cls."""
  def WhichOneof(self, oneof_name):
    """Returns the name of the currently set field inside a oneof, or None."""
    try:
      field = message_descriptor.oneofs_by_name[oneof_name]
    except KeyError:
      # Unknown oneof names are a caller error, not a "nothing set" case.
      raise ValueError(
          'Protocol message has no oneof "%s" field.' % oneof_name)

    # _oneofs tracks the candidate; HasField confirms it is actually present
    # (a lazily-created sub-message may be tracked but not yet set).
    nested_field = self._oneofs.get(field, None)
    if nested_field is not None and self.HasField(nested_field.name):
      return nested_field.name
    else:
      return None

  cls.WhichOneof = WhichOneof
1428
+
1429
+
1430
def _Clear(self):
  """Clears all fields, unknown fields, and oneof state of the message."""
  # Clear fields.
  self._fields = {}
  self._unknown_fields = ()

  self._oneofs = {}
  # Mutating method: invalidate cached sizes and notify the parent.
  self._Modified()
1437
+
1438
+
1439
+ def _UnknownFields(self):
1440
+ raise NotImplementedError('Please use the add-on feaure '
1441
+ 'unknown_fields.UnknownFieldSet(message) in '
1442
+ 'unknown_fields.py instead.')
1443
+
1444
+
1445
def _DiscardUnknownFields(self):
  """Drops unknown fields from this message and, recursively, all children."""
  self._unknown_fields = []
  for field, value in self.ListFields():
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      if _IsMapField(field):
        # Only message-valued maps can hold nested unknown fields.
        if _IsMessageMapField(field):
          for key in value:
            value[key].DiscardUnknownFields()
      elif field.is_repeated:
        for sub_message in value:
          sub_message.DiscardUnknownFields()
      else:
        value.DiscardUnknownFields()
1458
+
1459
+
1460
def _SetListener(self, listener):
  """Sets this message's change listener; None installs a no-op listener."""
  if listener is None:
    self._listener = message_listener_mod.NullMessageListener()
  else:
    self._listener = listener
1465
+
1466
+
1467
def _AddMessageMethods(message_descriptor, cls):
  """Adds implementations of all Message methods to cls."""
  _AddListFieldsMethod(message_descriptor, cls)
  _AddHasFieldMethod(message_descriptor, cls)
  _AddClearFieldMethod(message_descriptor, cls)
  # Extension accessors only make sense on extendable (proto2) messages.
  if message_descriptor.is_extendable:
    _AddClearExtensionMethod(cls)
    _AddHasExtensionMethod(cls)
  _AddEqualsMethod(message_descriptor, cls)
  _AddStrMethod(message_descriptor, cls)
  _AddReprMethod(message_descriptor, cls)
  _AddUnicodeMethod(message_descriptor, cls)
  _AddContainsMethod(message_descriptor, cls)
  _AddByteSizeMethod(message_descriptor, cls)
  _AddSerializeToStringMethod(message_descriptor, cls)
  _AddSerializePartialToStringMethod(message_descriptor, cls)
  _AddMergeFromStringMethod(message_descriptor, cls)
  _AddIsInitializedMethod(message_descriptor, cls)
  _AddMergeFromMethod(cls)
  _AddWhichOneofMethod(message_descriptor, cls)
  # Adds methods which do not depend on cls.
  cls.Clear = _Clear
  cls.DiscardUnknownFields = _DiscardUnknownFields
  cls._SetListener = _SetListener
1491
+
1492
+
1493
def _AddPrivateHelperMethods(message_descriptor, cls):
  """Adds implementation of private helper methods to cls."""

  def Modified(self):
    """Sets the _cached_byte_size_dirty bit to true,
    and propagates this to our listener iff this was a state change.
    """

    # Note:  Some callers check _cached_byte_size_dirty before calling
    #   _Modified() as an extra optimization.  So, if this method is ever
    #   changed such that it does stuff even when _cached_byte_size_dirty is
    #   already true, the callers need to be updated.
    if not self._cached_byte_size_dirty:
      self._cached_byte_size_dirty = True
      self._listener_for_children.dirty = True
      self._is_present_in_parent = True
      self._listener.Modified()

  def _UpdateOneofState(self, field):
    """Sets field as the active field in its containing oneof.

    Will also delete currently active field in the oneof, if it is different
    from the argument. Does not mark the message as modified.
    """
    other_field = self._oneofs.setdefault(field.containing_oneof, field)
    if other_field is not field:
      # A different member was active: evict it and record the new one.
      del self._fields[other_field]
      self._oneofs[field.containing_oneof] = field

  cls._Modified = Modified
  # SetInParent is the public alias of _Modified: it forces presence of this
  # message in its parent.
  cls.SetInParent = Modified
  cls._UpdateOneofState = _UpdateOneofState
1525
+
1526
+
1527
class _Listener(object):

  """MessageListener implementation that a parent message registers with its
  child message.

  In order to support semantics like:

    foo.bar.baz.moo = 23
    assert foo.HasField('bar')

  ...child objects must have back references to their parents.
  This helper class is at the heart of this support.
  """

  def __init__(self, parent_message):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
    """
    # This listener establishes a back reference from a child (contained) object
    # to its parent (containing) object.  We make this a weak reference to avoid
    # creating cyclic garbage when the client finishes with the 'parent' object
    # in the tree.
    if isinstance(parent_message, weakref.ProxyType):
      # Already a proxy (e.g. handed down from another listener): reuse it.
      self._parent_message_weakref = parent_message
    else:
      self._parent_message_weakref = weakref.proxy(parent_message)

    # As an optimization, we also indicate directly on the listener whether
    # or not the parent message is dirty.  This way we can avoid traversing
    # up the tree in the common case.
    self.dirty = False

  def Modified(self):
    # Fast path: if the parent is already marked dirty, there is nothing to
    # propagate.
    if self.dirty:
      return
    try:
      # Propagate the signal to our parents iff this is the first field set.
      self._parent_message_weakref._Modified()
    except ReferenceError:
      # We can get here if a client has kept a reference to a child object,
      # and is now setting a field on it, but the child's parent has been
      # garbage-collected.  This is not an error.
      pass
1571
+
1572
+
1573
class _OneofListener(_Listener):
  """Special listener implementation for setting composite oneof fields."""

  def __init__(self, parent_message, field):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
      field: The descriptor of the field being set in the parent message.
    """
    super(_OneofListener, self).__init__(parent_message)
    self._field = field

  def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
      # Mark our field as the active oneof member before the generic
      # dirty-bit propagation.
      self._parent_message_weakref._UpdateOneofState(self._field)
      super(_OneofListener, self).Modified()
    except ReferenceError:
      # Parent already garbage-collected; nothing to update.
      pass
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/testing_refleaks.py ADDED
@@ -0,0 +1,128 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A subclass of unittest.TestCase which checks for reference leaks.
9
+
10
+ To use:
11
+ - Use testing_refleak.BaseTestCase instead of unittest.TestCase
12
+ - Configure and compile Python with --with-pydebug
13
+
14
+ If sys.gettotalrefcount() is not available (because Python was built without
15
+ the Py_DEBUG option), then this module is a no-op and tests will run normally.
16
+ """
17
+
18
+ import copyreg
19
+ import gc
20
+ import sys
21
+ import unittest
22
+
23
+
24
class LocalTestResult(unittest.TestResult):
  """A TestResult which forwards events to a parent object, except for Skips."""

  def __init__(self, parent_result):
    super().__init__()
    # Aggregating TestResult that receives forwarded error/failure events.
    self.parent_result = parent_result

  def addError(self, test, error):
    # Errors from repeated runs still count; forward them to the parent.
    self.parent_result.addError(test, error)

  def addFailure(self, test, error):
    # Failures from repeated runs still count; forward them to the parent.
    self.parent_result.addFailure(test, error)

  def addSkip(self, test, reason):
    # Skips would be double-reported across repeated runs; drop them.
    pass

  def addDuration(self, test, duration):
    # Durations of repeated runs would be misleading; ignore them.
    pass
+ pass
42
+
43
+
44
class ReferenceLeakCheckerMixin(object):
  """A mixin class for TestCase, which checks reference counts."""

  # Number of measured runs; every run's refcount delta must be zero.
  NB_RUNS = 3

  def run(self, result=None):
    testMethod = getattr(self, self._testMethodName)
    expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
    expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
    if expecting_failure_class or expecting_failure_method:
      # Expected failures raise exceptions, which perturb refcounts; skip the
      # leak check entirely for them.
      return

    # python_message.py registers all Message classes to some pickle global
    # registry, which makes the classes immortal.
    # We save a copy of this registry, and reset it before we count references.
    self._saved_pickle_registry = copyreg.dispatch_table.copy()

    # Run the test twice, to warm up the instance attributes.
    super(ReferenceLeakCheckerMixin, self).run(result=result)
    super(ReferenceLeakCheckerMixin, self).run(result=result)

    local_result = LocalTestResult(result)
    num_flakes = 0
    refcount_deltas = []

    # Observe the refcount, then create oldrefcount which actually makes the
    # refcount 1 higher than the recorded value immediately
    oldrefcount = self._getRefcounts()
    while len(refcount_deltas) < self.NB_RUNS:
      oldrefcount = self._getRefcounts()
      super(ReferenceLeakCheckerMixin, self).run(result=local_result)
      newrefcount = self._getRefcounts()
      # If the GC was able to collect some objects after the call to run() that
      # it could not collect before the call, then the counts won't match.
      if newrefcount < oldrefcount and num_flakes < 2:
        # This result is (probably) a flake -- garbage collectors aren't very
        # predictable, but a lower ending refcount is the opposite of the
        # failure we are testing for. If the result is repeatable, then we will
        # eventually report it, but not after trying to eliminate it.
        num_flakes += 1
        continue
      num_flakes = 0
      refcount_deltas.append(newrefcount - oldrefcount)
      print(refcount_deltas, self)

    try:
      self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
    except Exception:  # pylint: disable=broad-except
      result.addError(self, sys.exc_info())

  def _getRefcounts(self):
    """Flushes interpreter caches and GC, then returns the total refcount."""
    if hasattr(sys, "_clear_internal_caches"):  # Since 3.13
      sys._clear_internal_caches()  # pylint: disable=protected-access
    else:
      sys._clear_type_cache()  # pylint: disable=protected-access
    # Restore the pickle registry saved in run() so registrations made by the
    # test itself do not count as leaks.
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(self._saved_pickle_registry)
    # It is sometimes necessary to gc.collect() multiple times, to ensure
    # that all objects can be collected.
    gc.collect()
    gc.collect()
    gc.collect()
    return sys.gettotalrefcount()
+
108
+
109
+ if hasattr(sys, 'gettotalrefcount'):
110
+
111
+ def TestCase(test_class):
112
+ new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
113
+ new_class = type(test_class)(
114
+ test_class.__name__, new_bases, dict(test_class.__dict__))
115
+ return new_class
116
+ SkipReferenceLeakChecker = unittest.skip
117
+
118
+ else:
119
+ # When PyDEBUG is not enabled, run the tests normally.
120
+
121
+ def TestCase(test_class):
122
+ return test_class
123
+
124
+ def SkipReferenceLeakChecker(reason):
125
+ del reason # Don't skip, so don't need a reason.
126
+ def Same(func):
127
+ return func
128
+ return Same
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/type_checkers.py ADDED
@@ -0,0 +1,455 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides type checking routines.
9
+
10
+ This module defines type checking utilities in the forms of dictionaries:
11
+
12
+ VALUE_CHECKERS: A dictionary of field types and a value validation object.
13
+ TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
14
+ function.
15
+ TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
16
+ function.
17
+ FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
18
+ corresponding wire types.
19
+ TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
20
+ function.
21
+ """
22
+
23
+ __author__ = 'robinson@google.com (Will Robinson)'
24
+
25
+ import numbers
26
+ import struct
27
+ import warnings
28
+
29
+ from google.protobuf import descriptor
30
+ from google.protobuf.internal import decoder
31
+ from google.protobuf.internal import encoder
32
+ from google.protobuf.internal import wire_format
33
+
34
+ _FieldDescriptor = descriptor.FieldDescriptor
35
+ # TODO: Remove this warning count after 34.0
36
+ # Assign bool to int/enum warnings will print 100 times at most which should
37
+ # be enough for users to notice and do not cause timeout.
38
+ _BoolWarningCount = 100
39
+
40
+ def TruncateToFourByteFloat(original):
41
+ return struct.unpack('<f', struct.pack('<f', original))[0]
42
+
43
+
44
+ def ToShortestFloat(original):
45
+ """Returns the shortest float that has same value in wire."""
46
+ # All 4 byte floats have between 6 and 9 significant digits, so we
47
+ # start with 6 as the lower bound.
48
+ # It has to be iterative because use '.9g' directly can not get rid
49
+ # of the noises for most values. For example if set a float_field=0.9
50
+ # use '.9g' will print 0.899999976.
51
+ precision = 6
52
+ rounded = float('{0:.{1}g}'.format(original, precision))
53
+ while TruncateToFourByteFloat(rounded) != original:
54
+ precision += 1
55
+ rounded = float('{0:.{1}g}'.format(original, precision))
56
+ return rounded
57
+
58
+
59
+ def GetTypeChecker(field):
60
+ """Returns a type checker for a message field of the specified types.
61
+
62
+ Args:
63
+ field: FieldDescriptor object for this field.
64
+
65
+ Returns:
66
+ An instance of TypeChecker which can be used to verify the types
67
+ of values assigned to a field of the specified type.
68
+ """
69
+ if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
70
+ field.type == _FieldDescriptor.TYPE_STRING):
71
+ return UnicodeValueChecker()
72
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
73
+ if field.enum_type.is_closed:
74
+ return EnumValueChecker(field.enum_type)
75
+ else:
76
+ # When open enums are supported, any int32 can be assigned.
77
+ return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
78
+ return _VALUE_CHECKERS[field.cpp_type]
79
+
80
+
81
+ # None of the typecheckers below make any attempt to guard against people
82
+ # subclassing builtin types and doing weird things. We're not trying to
83
+ # protect against malicious clients here, just people accidentally shooting
84
+ # themselves in the foot in obvious ways.
85
# None of the typecheckers below make any attempt to guard against people
# subclassing builtin types and doing weird things.  We're not trying to
# protect against malicious clients here, just people accidentally shooting
# themselves in the foot in obvious ways.
class TypeChecker(object):
  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if isinstance(proposed_value, self._acceptable_types):
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))
104
+
105
+
106
class TypeCheckerWithDefault(TypeChecker):
  """TypeChecker that also carries the field's default value."""

  def __init__(self, default_value, *acceptable_types):
    super(TypeCheckerWithDefault, self).__init__(*acceptable_types)
    self._default_value = default_value

  def DefaultValue(self):
    return self._default_value
114
+
115
+
116
class BoolValueChecker(object):
  """Type checker used for bool fields."""

  def CheckValue(self, proposed_value):
    """Accepts anything index-like (bool/int) and normalizes it to bool."""
    value_type = type(proposed_value)
    from_numpy = value_type.__module__ == 'numpy'
    if not hasattr(proposed_value, '__index__'):
      # Under NumPy 2.3, numpy.bool does not have an __index__ method.
      if from_numpy and value_type.__name__ == 'bool':
        return bool(proposed_value)
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (bool, int)))
    if from_numpy and value_type.__name__ == 'ndarray':
      # ndarrays implement __index__ for 0-d arrays but are rejected here.
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (bool, int)))
    return bool(proposed_value)

  def DefaultValue(self):
    return False
139
+
140
+
141
+ # IntValueChecker and its subclasses perform integer type-checks
142
+ # and bounds-checks.
143
# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):
  """Checker used for integer fields.  Performs type-check and range check."""

  def CheckValue(self, proposed_value):
    global _BoolWarningCount
    value_type = type(proposed_value)
    if value_type == bool and _BoolWarningCount > 0:
      # Assigning bool to an int field is deprecated; warn a bounded number
      # of times (shared module-level budget).
      _BoolWarningCount -= 1
      # TODO: Raise errors in 2026 Q1 release
      warnings.warn(
          '%.1024r has type %s, but expected one of: %s. This warning '
          'will turn into error in 7.34.0, please fix it before that.'
          % (proposed_value, value_type, (int,)))

    if not hasattr(proposed_value, '__index__') or (
        value_type.__module__ == 'numpy' and value_type.__name__ == 'ndarray'):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (int,)))

    as_int = int(proposed_value)
    if not self._MIN <= as_int <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    # We force all values to int to make alternate implementations where the
    # distinction is more significant (e.g. the C++ implementation) simpler.
    return as_int

  def DefaultValue(self):
    return 0
179
+
180
+
181
class EnumValueChecker(object):
  """Checker used for enum fields.  Performs type-check and range check."""

  def __init__(self, enum_type):
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    global _BoolWarningCount
    if type(proposed_value) == bool and _BoolWarningCount > 0:
      # Bounded deprecation warning for bool-to-enum assignment.
      _BoolWarningCount -= 1
      # TODO: Raise errors in 2026 Q1 release
      warnings.warn(
          '%.1024r has type %s, but expected one of: %s. This warning '
          'will turn into error in 7.34.0, please fix it before that.'
          % (proposed_value, type(proposed_value), (int,)))
    if not isinstance(proposed_value, numbers.Integral):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (int,)))
    if int(proposed_value) not in self._enum_type.values_by_number:
      # Closed enums only accept declared values.
      raise ValueError('Unknown enum value: %d' % proposed_value)
    # The value is returned as-is, not normalized to a plain int.
    return proposed_value

  def DefaultValue(self):
    return self._enum_type.values[0].number
213
+
214
+
215
class UnicodeValueChecker(object):
  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    if not isinstance(proposed_value, (bytes, str)):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (bytes, str)))

    if isinstance(proposed_value, bytes):
      # bytes input must be valid UTF-8; return the decoded str.
      try:
        return proposed_value.decode('utf-8')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
                         'encoding. Non-UTF-8 strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))

    # str input must itself be encodable as UTF-8 (rejects lone surrogates).
    try:
      proposed_value.encode('utf8')
    except UnicodeEncodeError:
      raise ValueError('%.1024r isn\'t a valid unicode string and '
                       'can\'t be encoded in UTF-8.' %
                       (proposed_value))
    return proposed_value

  def DefaultValue(self):
    return u""
249
+
250
+
251
class Int32ValueChecker(IntValueChecker):
  # Signed 32-bit two's-complement range; plain ints keep the
  # range comparison cheap.
  _MIN = -(1 << 31)
  _MAX = (1 << 31) - 1
256
+
257
+
258
class Uint32ValueChecker(IntValueChecker):
  # Unsigned 32-bit range.
  _MIN = 0
  _MAX = 0xFFFFFFFF
261
+
262
+
263
class Int64ValueChecker(IntValueChecker):
  # Signed 64-bit two's-complement range.
  _MAX = (1 << 63) - 1
  _MIN = -(1 << 63)
266
+
267
+
268
class Uint64ValueChecker(IntValueChecker):
  # Unsigned 64-bit range.
  _MIN = 0
  _MAX = 0xFFFFFFFFFFFFFFFF
271
+
272
+
273
# The max 4 bytes float is about 3.4028234663852886e+38
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
_FLOAT_MIN = -_FLOAT_MAX
# Largest double that still rounds back to the finite float32 max rather
# than overflowing to infinity; FloatValueChecker uses it to decide
# between clamping and returning +/-inf.
_MAX_FLOAT_AS_DOUBLE_ROUNDED = 3.4028235677973366e38
_INF = float('inf')
_NEG_INF = float('-inf')
279
+
280
+
281
class DoubleValueChecker(object):
  """Checker used for double fields.

  Performs type-check and range check.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    value_type = type(proposed_value)
    numeric = (hasattr(proposed_value, '__float__')
               or hasattr(proposed_value, '__index__'))
    if not numeric or (value_type.__module__ == 'numpy'
                       and value_type.__name__ == 'ndarray'):
      raise TypeError('%.1024r has type %s, but expected one of: int, float' %
                      (proposed_value, value_type))
    return float(proposed_value)

  def DefaultValue(self):
    return 0.0
300
+
301
+
302
class FloatValueChecker(DoubleValueChecker):
  """Checker used for float fields.

  Performs type-check and range check.

  Values exceeding a 32-bit float will be converted to inf/-inf.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    as_double = super().CheckValue(proposed_value)
    # This inf rounding matches the C++ proto SafeDoubleToFloat logic:
    # doubles just past the finite float32 limit clamp to the limit,
    # anything farther overflows to infinity.
    if as_double > _FLOAT_MAX:
      return _FLOAT_MAX if as_double <= _MAX_FLOAT_AS_DOUBLE_ROUNDED else _INF
    if as_double < _FLOAT_MIN:
      if as_double >= -_MAX_FLOAT_AS_DOUBLE_ROUNDED:
        return _FLOAT_MIN
      return _NEG_INF
    # In-range values are narrowed to float32 precision.
    return TruncateToFourByteFloat(as_double)
324
+
325
# Type-checkers for all scalar CPPTYPEs.
# Note: CPPTYPE_STRING maps to the bytes checker here; proto `string`
# fields (TYPE_STRING) are handed a UnicodeValueChecker by GetTypeChecker
# instead.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
    _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
    _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
}
336
+
337
+
338
# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".  Covers every _FieldDescriptor.TYPE_*.
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
}
362
+
363
+
364
# Maps from field types to encoder constructors.  Each value is a factory
# from the `encoder` module, not a ready-made encoder.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
}
385
+
386
+
387
# Maps from field types to sizer constructors.  Parallels TYPE_TO_ENCODER:
# a sizer computes the serialized size without producing bytes.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
}
408
+
409
+
410
# Maps from field type to a decoder constructor.  Mirror image of
# TYPE_TO_ENCODER, drawing factories from the `decoder` module.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
}
431
+
432
# Maps from field type to expected wiretype.  Used to validate that a tag
# read off the wire carries the wire type the schema calls for.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
}
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/well_known_types.py ADDED
@@ -0,0 +1,695 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains well known classes.
9
+
10
+ This files defines well known classes which need extra maintenance including:
11
+ - Any
12
+ - Duration
13
+ - FieldMask
14
+ - Struct
15
+ - Timestamp
16
+ """
17
+
18
+ __author__ = 'jieluo@google.com (Jie Luo)'
19
+
20
+ import calendar
21
+ import collections.abc
22
+ import datetime
23
+ from typing import Union
24
+ import warnings
25
+ from google.protobuf.internal import field_mask
26
+
27
# Re-exported so callers can keep importing FieldMask from this module.
FieldMask = field_mask.FieldMask

# strptime format for the seconds portion of an RFC 3339 timestamp;
# fractional digits and the zone offset are parsed separately.
_TIMESTAMPFORMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
_NANOS_PER_MILLISECOND = 1000000
_NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
# Upper bound on Duration seconds (10,000 average Gregorian years).
_DURATION_SECONDS_MAX = 315576000000
# Timestamp seconds range enforced by _CheckTimestampValid.
_TIMESTAMP_SECONDS_MIN = -62135596800
_TIMESTAMP_SECONDS_MAX = 253402300799

# Unix epoch, both naive and UTC-aware, used as the base for
# Timestamp <-> datetime conversions.
_EPOCH_DATETIME_NAIVE = datetime.datetime(1970, 1, 1, tzinfo=None)
_EPOCH_DATETIME_AWARE = _EPOCH_DATETIME_NAIVE.replace(
    tzinfo=datetime.timezone.utc
)
44
+
45
+
46
class Any(object):
  """Class for Any Message type."""

  # No storage of its own: `type_url` and `value` come from elsewhere
  # (this class carries only behavior).
  __slots__ = ()

  def Pack(
      self, msg, type_url_prefix='type.googleapis.com/', deterministic=None
  ):
    """Packs the specified message into current Any message."""
    full_name = msg.DESCRIPTOR.full_name
    if type_url_prefix.endswith('/'):
      self.type_url = '%s%s' % (type_url_prefix, full_name)
    else:
      # Insert the separator when the prefix is empty or lacks one.
      self.type_url = '%s/%s' % (type_url_prefix, full_name)
    self.value = msg.SerializeToString(deterministic=deterministic)

  def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    if not self.Is(msg.DESCRIPTOR):
      return False
    msg.ParseFromString(self.value)
    return True

  def TypeName(self):
    """Returns the protobuf type name of the inner message."""
    # Only last part is to be used: b/25630112
    _, _, name = self.type_url.rpartition('/')
    return name

  def Is(self, descriptor):
    """Checks if this Any represents the given protobuf type."""
    if '/' not in self.type_url:
      return False
    return self.TypeName() == descriptor.full_name
77
+
78
+
79
class Timestamp(object):
  """Class for Timestamp message type."""

  # NOTE(review): `__slots__ = ()` and the absence of __init__ imply the
  # `seconds`/`nanos` attributes read and written below are supplied by a
  # subclass (presumably the generated message class) — confirm before
  # using this class standalone.
  __slots__ = ()

  def ToJsonString(self):
    """Converts Timestamp to RFC 3339 date string format.

    Returns:
      A string converted from timestamp. The string is always Z-normalized
      and uses 3, 6 or 9 fractional digits as required to represent the
      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'

    Raises:
      ValueError: If seconds/nanos are outside the valid Timestamp range.
    """
    _CheckTimestampValid(self.seconds, self.nanos)
    nanos = self.nanos
    # Split into whole days plus an in-day remainder before building the
    # timedelta, keeping each component small.
    seconds = self.seconds % _SECONDS_PER_DAY
    days = (self.seconds - seconds) // _SECONDS_PER_DAY
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)

    result = dt.isoformat()
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 'Z'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03dZ' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06dZ' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09dZ' % nanos

  def FromJsonString(self, value):
    """Parse a RFC 3339 date string format to Timestamp.

    Args:
      value: A date string. Any fractional digits (or none) and any offset are
        accepted as long as they fit into nano-seconds precision. Example of
        accepted format: '1972-01-01T10:00:20.021-05:00'

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
    timezone_offset = value.find('Z')
    if timezone_offset == -1:
      timezone_offset = value.find('+')
    if timezone_offset == -1:
      # rfind, not find: the date portion itself contains '-' separators,
      # so only the last '-' can be a negative zone offset.
      timezone_offset = value.rfind('-')
    if timezone_offset == -1:
      raise ValueError(
          'Failed to parse timestamp: missing valid timezone offset.'
      )
    time_value = value[0:timezone_offset]
    # Parse datetime and nanos.
    point_position = time_value.find('.')
    if point_position == -1:
      second_value = time_value
      nano_value = ''
    else:
      second_value = time_value[:point_position]
      nano_value = time_value[point_position + 1 :]
    if 't' in second_value:
      # strptime's %T separator check is case-insensitive on some inputs;
      # reject lowercase 't' explicitly to match the JSON mapping.
      raise ValueError(
          "time data '{0}' does not match format '%Y-%m-%dT%H:%M:%S', "
          "lowercase 't' is not accepted".format(second_value)
      )
    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFORMAT)
    td = date_object - datetime.datetime(1970, 1, 1)
    seconds = td.seconds + td.days * _SECONDS_PER_DAY
    if len(nano_value) > 9:
      raise ValueError(
          'Failed to parse Timestamp: nanos {0} more than '
          '9 fractional digits.'.format(nano_value)
      )
    if nano_value:
      # Scale the decimal fraction up to nanoseconds; round() compensates
      # for the float multiplication.
      nanos = round(float('0.' + nano_value) * 1e9)
    else:
      nanos = 0
    # Parse timezone offsets.
    if value[timezone_offset] == 'Z':
      if len(value) != timezone_offset + 1:
        raise ValueError(
            'Failed to parse timestamp: invalid trailing data {0}.'.format(
                value
            )
        )
    else:
      timezone = value[timezone_offset:]
      pos = timezone.find(':')
      if pos == -1:
        raise ValueError('Invalid timezone offset value: {0}.'.format(timezone))
      # Normalize to UTC: subtract positive offsets, add negative ones.
      if timezone[0] == '+':
        seconds -= (int(timezone[1:pos]) * 60 + int(timezone[pos + 1 :])) * 60
      else:
        seconds += (int(timezone[1:pos]) * 60 + int(timezone[pos + 1 :])) * 60
    # Set seconds and nanos
    _CheckTimestampValid(seconds, nanos)
    self.seconds = int(seconds)
    self.nanos = int(nanos)

  def GetCurrentTime(self):
    """Get the current UTC into Timestamp."""
    self.FromDatetime(datetime.datetime.now(tz=datetime.timezone.utc))

  def ToNanoseconds(self):
    """Converts Timestamp to nanoseconds since epoch."""
    _CheckTimestampValid(self.seconds, self.nanos)
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts Timestamp to microseconds since epoch.

    Sub-microsecond nanos are truncated (floor division).
    """
    _CheckTimestampValid(self.seconds, self.nanos)
    return (
        self.seconds * _MICROS_PER_SECOND + self.nanos // _NANOS_PER_MICROSECOND
    )

  def ToMilliseconds(self):
    """Converts Timestamp to milliseconds since epoch.

    Sub-millisecond nanos are truncated (floor division).
    """
    _CheckTimestampValid(self.seconds, self.nanos)
    return (
        self.seconds * _MILLIS_PER_SECOND + self.nanos // _NANOS_PER_MILLISECOND
    )

  def ToSeconds(self):
    """Converts Timestamp to seconds since epoch (nanos discarded)."""
    _CheckTimestampValid(self.seconds, self.nanos)
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds since epoch to Timestamp."""
    seconds = nanos // _NANOS_PER_SECOND
    nanos = nanos % _NANOS_PER_SECOND
    _CheckTimestampValid(seconds, nanos)
    self.seconds = seconds
    self.nanos = nanos

  def FromMicroseconds(self, micros):
    """Converts microseconds since epoch to Timestamp."""
    seconds = micros // _MICROS_PER_SECOND
    nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
    _CheckTimestampValid(seconds, nanos)
    self.seconds = seconds
    self.nanos = nanos

  def FromMilliseconds(self, millis):
    """Converts milliseconds since epoch to Timestamp."""
    seconds = millis // _MILLIS_PER_SECOND
    nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
    _CheckTimestampValid(seconds, nanos)
    self.seconds = seconds
    self.nanos = nanos

  def FromSeconds(self, seconds):
    """Converts seconds since epoch to Timestamp (nanos reset to 0)."""
    _CheckTimestampValid(seconds, 0)
    self.seconds = seconds
    self.nanos = 0

  def ToDatetime(self, tzinfo=None):
    """Converts Timestamp to a datetime.

    Args:
      tzinfo: A datetime.tzinfo subclass; defaults to None.

    Returns:
      If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone
      information, i.e. not aware that it's UTC).

      Otherwise, returns a timezone-aware datetime in the input timezone.
    """
    # Using datetime.fromtimestamp for this would avoid constructing an extra
    # timedelta object and possibly an extra datetime. Unfortunately, that has
    # the disadvantage of not handling the full precision (on all platforms, see
    # https://github.com/python/cpython/issues/109849) or full range (on some
    # platforms, see https://github.com/python/cpython/issues/110042) of
    # datetime.
    _CheckTimestampValid(self.seconds, self.nanos)
    delta = datetime.timedelta(
        seconds=self.seconds,
        microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND),
    )
    if tzinfo is None:
      return _EPOCH_DATETIME_NAIVE + delta
    else:
      # Note the tz conversion has to come after the timedelta arithmetic.
      return (_EPOCH_DATETIME_AWARE + delta).astimezone(tzinfo)

  def FromDatetime(self, dt):
    """Converts datetime to Timestamp.

    Args:
      dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.

    Raises:
      AttributeError: If `dt` is not datetime-like.
      ValueError: If the resulting seconds/nanos are out of Timestamp range.
    """
    # Using this guide: http://wiki.python.org/moin/WorkingWithTime
    # And this conversion guide: http://docs.python.org/library/time.html

    # Turn the date parameter into a tuple (struct_time) that can then be
    # manipulated into a long value of seconds. During the conversion from
    # struct_time to long, the source date in UTC, and so it follows that the
    # correct transformation is calendar.timegm()
    try:
      seconds = calendar.timegm(dt.utctimetuple())
      nanos = dt.microsecond * _NANOS_PER_MICROSECOND
    except AttributeError as e:
      raise AttributeError(
          'Fail to convert to Timestamp. Expected a datetime like '
          'object got {0} : {1}'.format(type(dt).__name__, e)
      ) from e
    _CheckTimestampValid(seconds, nanos)
    self.seconds = seconds
    self.nanos = nanos

  def _internal_assign(self, dt):
    # Assignment hook: delegates to FromDatetime.
    self.FromDatetime(dt)

  def __add__(self, value) -> datetime.datetime:
    # Timestamp + Duration/timedelta yields a datetime, not a Timestamp.
    if isinstance(value, Duration):
      return self.ToDatetime() + value.ToTimedelta()
    return self.ToDatetime() + value

  __radd__ = __add__

  def __sub__(self, value) -> Union[datetime.datetime, datetime.timedelta]:
    # Timestamp - Timestamp -> timedelta; Timestamp - Duration/timedelta
    # -> datetime.
    if isinstance(value, Timestamp):
      return self.ToDatetime() - value.ToDatetime()
    elif isinstance(value, Duration):
      return self.ToDatetime() - value.ToTimedelta()
    return self.ToDatetime() - value

  def __rsub__(self, dt) -> datetime.timedelta:
    return dt - self.ToDatetime()
313
+
314
+
315
def _CheckTimestampValid(seconds, nanos):
  """Raises ValueError unless (seconds, nanos) is a valid Timestamp.

  Args:
    seconds: Seconds since the Unix epoch; must lie in
      [_TIMESTAMP_SECONDS_MIN, _TIMESTAMP_SECONDS_MAX].
    nanos: Sub-second nanoseconds; must lie in [0, _NANOS_PER_SECOND).

  Raises:
    ValueError: If either component is out of range.
  """
  if seconds < _TIMESTAMP_SECONDS_MIN or seconds > _TIMESTAMP_SECONDS_MAX:
    raise ValueError(
        'Timestamp is not valid: Seconds {0} must be in range '
        '[-62135596800, 253402300799].'.format(seconds))
  if nanos < 0 or nanos >= _NANOS_PER_SECOND:
    # Bug fix: the message previously claimed the range was [0, 999999],
    # but the check (correctly) allows up to 999999999 nanoseconds.
    raise ValueError(
        'Timestamp is not valid: Nanos {} must be in a range '
        '[0, 999999999].'.format(nanos)
    )
325
+
326
+
327
+ class Duration(object):
328
+ """Class for Duration message type."""
329
+
330
+ __slots__ = ()
331
+
332
+ def ToJsonString(self):
333
+ """Converts Duration to string format.
334
+
335
+ Returns:
336
+ A string converted from self. The string format will contains
337
+ 3, 6, or 9 fractional digits depending on the precision required to
338
+ represent the exact Duration value. For example: "1s", "1.010s",
339
+ "1.000000100s", "-3.100s"
340
+ """
341
+ _CheckDurationValid(self.seconds, self.nanos)
342
+ if self.seconds < 0 or self.nanos < 0:
343
+ result = '-'
344
+ seconds = -self.seconds + int((0 - self.nanos) // 1e9)
345
+ nanos = (0 - self.nanos) % 1e9
346
+ else:
347
+ result = ''
348
+ seconds = self.seconds + int(self.nanos // 1e9)
349
+ nanos = self.nanos % 1e9
350
+ result += '%d' % seconds
351
+ if (nanos % 1e9) == 0:
352
+ # If there are 0 fractional digits, the fractional
353
+ # point '.' should be omitted when serializing.
354
+ return result + 's'
355
+ if (nanos % 1e6) == 0:
356
+ # Serialize 3 fractional digits.
357
+ return result + '.%03ds' % (nanos / 1e6)
358
+ if (nanos % 1e3) == 0:
359
+ # Serialize 6 fractional digits.
360
+ return result + '.%06ds' % (nanos / 1e3)
361
+ # Serialize 9 fractional digits.
362
+ return result + '.%09ds' % nanos
363
+
364
+ def FromJsonString(self, value):
365
+ """Converts a string to Duration.
366
+
367
+ Args:
368
+ value: A string to be converted. The string must end with 's'. Any
369
+ fractional digits (or none) are accepted as long as they fit into
370
+ precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s
371
+
372
+ Raises:
373
+ ValueError: On parsing problems.
374
+ """
375
+ if not isinstance(value, str):
376
+ raise ValueError('Duration JSON value not a string: {!r}'.format(value))
377
+ if len(value) < 1 or value[-1] != 's':
378
+ raise ValueError('Duration must end with letter "s": {0}.'.format(value))
379
+ try:
380
+ pos = value.find('.')
381
+ if pos == -1:
382
+ seconds = int(value[:-1])
383
+ nanos = 0
384
+ else:
385
+ seconds = int(value[:pos])
386
+ if value[0] == '-':
387
+ nanos = int(round(float('-0{0}'.format(value[pos:-1])) * 1e9))
388
+ else:
389
+ nanos = int(round(float('0{0}'.format(value[pos:-1])) * 1e9))
390
+ _CheckDurationValid(seconds, nanos)
391
+ self.seconds = seconds
392
+ self.nanos = nanos
393
+ except ValueError as e:
394
+ raise ValueError("Couldn't parse duration: {0} : {1}.".format(value, e))
395
+
396
+ def ToNanoseconds(self):
397
+ """Converts a Duration to nanoseconds."""
398
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
399
+
400
+ def ToMicroseconds(self):
401
+ """Converts a Duration to microseconds."""
402
+ micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
403
+ return self.seconds * _MICROS_PER_SECOND + micros
404
+
405
+ def ToMilliseconds(self):
406
+ """Converts a Duration to milliseconds."""
407
+ millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
408
+ return self.seconds * _MILLIS_PER_SECOND + millis
409
+
410
+ def ToSeconds(self):
411
+ """Converts a Duration to seconds."""
412
+ return self.seconds
413
+
414
+ def FromNanoseconds(self, nanos):
415
+ """Converts nanoseconds to Duration."""
416
+ self._NormalizeDuration(
417
+ nanos // _NANOS_PER_SECOND, nanos % _NANOS_PER_SECOND
418
+ )
419
+
420
+ def FromMicroseconds(self, micros):
421
+ """Converts microseconds to Duration."""
422
+ self._NormalizeDuration(
423
+ micros // _MICROS_PER_SECOND,
424
+ (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND,
425
+ )
426
+
427
+ def FromMilliseconds(self, millis):
428
+ """Converts milliseconds to Duration."""
429
+ self._NormalizeDuration(
430
+ millis // _MILLIS_PER_SECOND,
431
+ (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND,
432
+ )
433
+
434
+ def FromSeconds(self, seconds):
435
+ """Converts seconds to Duration."""
436
+ self.seconds = seconds
437
+ self.nanos = 0
438
+
439
+ def ToTimedelta(self) -> datetime.timedelta:
440
+ """Converts Duration to timedelta."""
441
+ return datetime.timedelta(
442
+ seconds=self.seconds,
443
+ microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND),
444
+ )
445
+
446
  def FromTimedelta(self, td):
    """Converts timedelta to Duration.

    Args:
      td: A datetime.timedelta (or any object exposing timedelta's
        days/seconds/microseconds attributes).

    Raises:
      AttributeError: If td does not provide the timedelta attributes.
    """
    try:
      # timedelta normalizes so 0 <= seconds < 86400 and
      # 0 <= microseconds < 1e6; fold days into seconds and let
      # _NormalizeDuration reconcile the component signs.
      self._NormalizeDuration(
          td.seconds + td.days * _SECONDS_PER_DAY,
          td.microseconds * _NANOS_PER_MICROSECOND,
      )
    except AttributeError as e:
      raise AttributeError(
          'Fail to convert to Duration. Expected a timedelta like '
          'object got {0}: {1}'.format(type(td).__name__, e)
      ) from e
458
+
459
  def _internal_assign(self, td):
    # Internal assignment hook; delegates to FromTimedelta.
    self.FromTimedelta(td)
461
+
462
+ def _NormalizeDuration(self, seconds, nanos):
463
+ """Set Duration by seconds and nanos."""
464
+ # Force nanos to be negative if the duration is negative.
465
+ if seconds < 0 and nanos > 0:
466
+ seconds += 1
467
+ nanos -= _NANOS_PER_SECOND
468
+ self.seconds = seconds
469
+ self.nanos = nanos
470
+
471
  def __add__(self, value) -> Union[datetime.datetime, datetime.timedelta]:
    # Duration + Timestamp yields a datetime; Duration + timedelta/datetime
    # defers to the native datetime arithmetic on the converted timedelta.
    if isinstance(value, Timestamp):
      return self.ToTimedelta() + value.ToDatetime()
    return self.ToTimedelta() + value

  # Addition is commutative for all supported operand types, so the
  # reflected form reuses __add__.
  __radd__ = __add__
477
+
478
  def __sub__(self, value) -> datetime.timedelta:
    # Subtraction semantics come from datetime.timedelta after conversion.
    return self.ToTimedelta() - value
480
+
481
  def __rsub__(self, value) -> Union[datetime.datetime, datetime.timedelta]:
    # Reflected subtraction: e.g. datetime - Duration or timedelta - Duration.
    return value - self.ToTimedelta()
483
+
484
+
485
def _CheckDurationValid(seconds, nanos):
  """Raises ValueError unless (seconds, nanos) is a representable Duration."""
  if not -_DURATION_SECONDS_MAX <= seconds <= _DURATION_SECONDS_MAX:
    raise ValueError(
        'Duration is not valid: Seconds {0} must be in range '
        '[-315576000000, 315576000000].'.format(seconds)
    )
  if not -_NANOS_PER_SECOND < nanos < _NANOS_PER_SECOND:
    raise ValueError(
        'Duration is not valid: Nanos {0} must be in range '
        '[-999999999, 999999999].'.format(nanos)
    )
  # Both components must agree in sign (zero is compatible with either).
  if (seconds > 0 and nanos < 0) or (seconds < 0 and nanos > 0):
    raise ValueError('Duration is not valid: Sign mismatch.')
498
+
499
+
500
+ def _RoundTowardZero(value, divider):
501
+ """Truncates the remainder part after division."""
502
+ # For some languages, the sign of the remainder is implementation
503
+ # dependent if any of the operands is negative. Here we enforce
504
+ # "rounded toward zero" semantics. For example, for (-5) / 2 an
505
+ # implementation may give -3 as the result with the remainder being
506
+ # 1. This function ensures we always return -2 (closer to zero).
507
+ result = value // divider
508
+ remainder = value % divider
509
+ if result < 0 and remainder > 0:
510
+ return result + 1
511
+ else:
512
+ return result
513
+
514
+
515
def _SetStructValue(struct_value, value):
  """Copies a native Python value into a google.protobuf.Value submessage.

  Args:
    struct_value: The Value message to populate (exactly one oneof field
      of its 'kind' is set).
    value: None, bool, str, int, float, dict/Struct, or list/tuple/ListValue.

  Raises:
    ValueError: If value is of an unsupported type.
  """
  if value is None:
    struct_value.null_value = 0
  elif isinstance(value, bool):
    # Note: this check must come before the number check because in Python
    # True and False are also considered numbers.
    struct_value.bool_value = value
  elif isinstance(value, str):
    struct_value.string_value = value
  elif isinstance(value, (int, float)):
    struct_value.number_value = value
  elif isinstance(value, (dict, Struct)):
    # Clear first so keys from a previously-stored struct do not survive.
    struct_value.struct_value.Clear()
    struct_value.struct_value.update(value)
  elif isinstance(value, (list, tuple, ListValue)):
    # Clear first so elements from a previously-stored list do not survive.
    struct_value.list_value.Clear()
    struct_value.list_value.extend(value)
  else:
    raise ValueError('Unexpected type')
534
+
535
+
536
+ def _GetStructValue(struct_value):
537
+ which = struct_value.WhichOneof('kind')
538
+ if which == 'struct_value':
539
+ return struct_value.struct_value
540
+ elif which == 'null_value':
541
+ return None
542
+ elif which == 'number_value':
543
+ return struct_value.number_value
544
+ elif which == 'string_value':
545
+ return struct_value.string_value
546
+ elif which == 'bool_value':
547
+ return struct_value.bool_value
548
+ elif which == 'list_value':
549
+ return struct_value.list_value
550
+ elif which is None:
551
+ raise ValueError('Value not set')
552
+
553
+
554
class Struct(object):
  """Class for Struct message type.

  Provides dict-like access over the underlying `fields` map, converting
  between native Python values and google.protobuf.Value entries on the way
  in and out.
  """

  __slots__ = ()

  def __getitem__(self, key):
    return _GetStructValue(self.fields[key])

  def __setitem__(self, key, value):
    _SetStructValue(self.fields[key], value)

  def __delitem__(self, key):
    del self.fields[key]

  def __len__(self):
    return len(self.fields)

  def __iter__(self):
    return iter(self.fields)

  def _internal_assign(self, dictionary):
    # Assignment hook: replace the entire contents with `dictionary`.
    self.Clear()
    self.update(dictionary)

  def _internal_compare(self, other):
    # Structural equality against a plain dict (or dict-like).
    if len(self) != len(other):
      return False
    for field_name, field_value in self.items():
      if field_name not in other:
        return False
      their_value = other[field_name]
      if isinstance(their_value, (dict, list)):
        # Nested Struct/ListValue: recurse through its own comparison hook.
        if not field_value._internal_compare(their_value):
          return False
      elif field_value != their_value:
        return False
    return True

  def keys(self):  # pylint: disable=invalid-name
    return self.fields.keys()

  def values(self):  # pylint: disable=invalid-name
    return [self[k] for k in self]

  def items(self):  # pylint: disable=invalid-name
    return [(k, self[k]) for k in self]

  def get_or_create_list(self, key):
    """Returns a list for this key, creating if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('list_value'):
      # Clear will mark list_value modified which will indeed create a list.
      entry.list_value.Clear()
    return entry.list_value

  def get_or_create_struct(self, key):
    """Returns a struct for this key, creating if it didn't exist already."""
    entry = self.fields[key]
    if not entry.HasField('struct_value'):
      # Clear will mark struct_value modified which will indeed create a struct.
      entry.struct_value.Clear()
    return entry.struct_value

  def update(self, dictionary):  # pylint: disable=invalid-name
    for field_name, field_value in dictionary.items():
      _SetStructValue(self.fields[field_name], field_value)
618
+
619
+
620
+ collections.abc.MutableMapping.register(Struct)
621
+
622
+
623
class ListValue(object):
  """Class for ListValue message type.

  Provides list-like access over the underlying repeated `values` field,
  converting between native Python values and google.protobuf.Value entries.
  """

  __slots__ = ()

  def __len__(self):
    return len(self.values)

  def append(self, value):
    # values.add() appends a fresh Value message; fill it from `value`.
    _SetStructValue(self.values.add(), value)

  def extend(self, elem_seq):
    for value in elem_seq:
      self.append(value)

  def __getitem__(self, index):
    """Retrieves item by the specified index."""
    return _GetStructValue(self.values.__getitem__(index))

  def __setitem__(self, index, value):
    _SetStructValue(self.values.__getitem__(index), value)

  def __delitem__(self, key):
    del self.values[key]

  def _internal_assign(self, elem_seq):
    # Assignment hook: replace the entire contents with `elem_seq`.
    self.Clear()
    self.extend(elem_seq)

  def _internal_compare(self, other):
    # Structural equality against a plain list (or sequence).
    size = len(self)
    if size != len(other):
      return False
    for i in range(size):
      if isinstance(other[i], (dict, list)):
        # Nested Struct/ListValue: recurse through its own comparison hook.
        if not self[i]._internal_compare(other[i]):
          return False
      elif self[i] != other[i]:
        return False
    return True

  def items(self):
    # NOTE: unlike dict.items(), this yields values only (sequence protocol).
    for i in range(len(self)):
      yield self[i]

  def add_struct(self):
    """Appends and returns a struct value as the next value in the list."""
    struct_value = self.values.add().struct_value
    # Clear will mark struct_value modified which will indeed create a struct.
    struct_value.Clear()
    return struct_value

  def add_list(self):
    """Appends and returns a list value as the next value in the list."""
    list_value = self.values.add().list_value
    # Clear will mark list_value modified which will indeed create a list.
    list_value.Clear()
    return list_value
681
+
682
+
683
+ collections.abc.MutableSequence.register(ListValue)
684
+
685
+
686
# LINT.IfChange(wktbases)
# Maps well-known-type full names to the Python mixin class defined above
# that supplies their convenience methods (ToJsonString, ToDatetime, ...).
WKTBASES = {
    'google.protobuf.Any': Any,
    'google.protobuf.Duration': Duration,
    'google.protobuf.FieldMask': FieldMask,
    'google.protobuf.ListValue': ListValue,
    'google.protobuf.Struct': Struct,
    'google.protobuf.Timestamp': Timestamp,
}
# LINT.ThenChange(//depot/google.protobuf/compiler/python/pyi_generator.cc:wktbases)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/internal/wire_format.py ADDED
@@ -0,0 +1,245 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Constants and static functions to support protocol buffer wire format."""
9
+
10
+ __author__ = 'robinson@google.com (Will Robinson)'
11
+
12
+ import struct
13
+ from google.protobuf import descriptor
14
+ from google.protobuf import message
15
+
16
+
17
TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in //google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings that will encode/decode the specified formats
# (little-endian, fixed width), used by the encoder/decoder modules.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')
55
+
56
+
57
def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: If wire_type is outside [0, _WIRETYPE_MAX].
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return wire_type | (field_number << TAG_TYPE_BITS)
68
+
69
+
70
def UnpackTag(tag):
  """The inverse of PackTag(). Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  field_number = tag >> TAG_TYPE_BITS
  wire_type = tag & TAG_TYPE_MASK
  return field_number, wire_type
75
+
76
+
77
def ZigZagEncode(value):
  """ZigZag Transform: Encodes signed integers so that they can be
  effectively used with varint encoding. See wire_format.h for
  more details.
  """
  shifted = value << 1
  # Negatives flip every bit so small magnitudes stay small varints.
  return shifted if value >= 0 else shifted ^ ~0
85
+
86
+
87
def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  magnitude = value >> 1
  # Odd encodings correspond to negative originals: undo the bit flip.
  return magnitude ^ ~0 if value & 0x1 else magnitude
92
+
93
+
94
+
95
+ # The *ByteSize() functions below return the number of bytes required to
96
+ # serialize "field number + type" information and then serialize the value.
97
+
98
+
99
def Int32ByteSize(field_number, int32):
  # int32 is encoded as a sign-extended varint, identical to int64 on the wire.
  return Int64ByteSize(field_number, int32)
101
+
102
+
103
def Int32ByteSizeNoTag(int32):
  # Mask sign-extends negatives to 64 bits, so negative int32s take the
  # full 10 varint bytes, matching the wire format.
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
105
+
106
+
107
def Int64ByteSize(field_number, int64):
  # Have to convert to uint before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
110
+
111
+
112
def UInt32ByteSize(field_number, uint32):
  # uint32 varint encoding is a strict subset of uint64's.
  return UInt64ByteSize(field_number, uint32)
114
+
115
+
116
def UInt64ByteSize(field_number, uint64):
  # Tag bytes plus the varint-encoded payload bytes.
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
118
+
119
+
120
def SInt32ByteSize(field_number, int32):
  # sint32 zigzag-encodes first so small negatives stay small varints.
  return UInt32ByteSize(field_number, ZigZagEncode(int32))
122
+
123
+
124
def SInt64ByteSize(field_number, int64):
  # sint64 zigzag-encodes first so small negatives stay small varints.
  return UInt64ByteSize(field_number, ZigZagEncode(int64))
126
+
127
+
128
def Fixed32ByteSize(field_number, fixed32):
  # Tag + 4-byte fixed-width payload; the value itself never affects size.
  return TagByteSize(field_number) + 4
130
+
131
+
132
def Fixed64ByteSize(field_number, fixed64):
  # Tag + 8-byte fixed-width payload; the value itself never affects size.
  return TagByteSize(field_number) + 8
134
+
135
+
136
def SFixed32ByteSize(field_number, sfixed32):
  # Tag + 4-byte fixed-width payload; the value itself never affects size.
  return TagByteSize(field_number) + 4
138
+
139
+
140
def SFixed64ByteSize(field_number, sfixed64):
  # Tag + 8-byte fixed-width payload; the value itself never affects size.
  return TagByteSize(field_number) + 8
142
+
143
+
144
def FloatByteSize(field_number, flt):
  # Tag + 4-byte IEEE-754 single payload.
  return TagByteSize(field_number) + 4
146
+
147
+
148
def DoubleByteSize(field_number, double):
  # Tag + 8-byte IEEE-754 double payload.
  return TagByteSize(field_number) + 8
150
+
151
+
152
def BoolByteSize(field_number, b):
  # Booleans always encode as a single varint byte.
  return TagByteSize(field_number) + 1
154
+
155
+
156
def EnumByteSize(field_number, enum):
  # Enum numbers share uint32's varint size computation.
  return UInt32ByteSize(field_number, enum)
158
+
159
+
160
def StringByteSize(field_number, string):
  # Size is measured on the UTF-8 encoding, not the Python str length.
  return BytesByteSize(field_number, string.encode('utf-8'))
162
+
163
+
164
def BytesByteSize(field_number, b):
  # Tag + varint length prefix + raw payload bytes.
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))
168
+
169
+
170
def GroupByteSize(field_number, message):
  # Groups are delimited by START_GROUP/END_GROUP tags rather than a length.
  return (2 * TagByteSize(field_number)  # START and END group.
          + message.ByteSize())
173
+
174
+
175
def MessageByteSize(field_number, message):
  """Bytes needed for a length-delimited embedded message field.

  Wire layout: tag, varint-encoded payload length, then the payload itself.
  """
  # ByteSize() may walk the whole submessage; compute it once instead of
  # twice as the previous implementation did.
  msg_size = message.ByteSize()
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(msg_size)
          + msg_size)
179
+
180
+
181
def MessageSetItemByteSize(field_number, msg):
  """Bytes needed to serialize `msg` as a MessageSet item with this type_id."""
  # First compute the sizes of the tags.
  # There are 2 tags for the beginning and ending of the repeated group, that
  # is field number 1, one with field number 2 (type_id) and one with field
  # number 3 (message).
  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))

  # Add the number of bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # The number of bytes for encoding the length of the message.
  total_size += _VarUInt64ByteSizeNoTag(message_size)

  # The size of the message.
  total_size += message_size
  return total_size
199
+
200
+
201
def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Just pass in type 0, since the type won't affect the tag+type size:
  # the 3 type bits never push the varint across a 7-bit byte boundary
  # relative to any other valid wire type.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
205
+
206
+
207
+ # Private helper function for the *ByteSize() functions above.
208
+
209
+ def _VarUInt64ByteSizeNoTag(uint64):
210
+ """Returns the number of bytes required to serialize a single varint
211
+ using boundary value comparisons. (unrolled loop optimization -WPierce)
212
+ uint64 must be unsigned.
213
+ """
214
+ if uint64 <= 0x7f: return 1
215
+ if uint64 <= 0x3fff: return 2
216
+ if uint64 <= 0x1fffff: return 3
217
+ if uint64 <= 0xfffffff: return 4
218
+ if uint64 <= 0x7ffffffff: return 5
219
+ if uint64 <= 0x3ffffffffff: return 6
220
+ if uint64 <= 0x1ffffffffffff: return 7
221
+ if uint64 <= 0xffffffffffffff: return 8
222
+ if uint64 <= 0x7fffffffffffffff: return 9
223
+ if uint64 > UINT64_MAX:
224
+ raise message.EncodeError('Value out of range: %d' % uint64)
225
+ return 10
226
+
227
+
228
# Field types that may never use packed repeated encoding: all of them are
# length-delimited (or group-delimited) on the wire.
NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES
)
234
+
235
+
236
def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/json_format.py ADDED
@@ -0,0 +1,1107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains routines for printing protocol messages in JSON format.
9
+
10
+ Simple usage example:
11
+
12
+ # Create a proto object and serialize it to a json format string.
13
+ message = my_proto_pb2.MyMessage(foo='bar')
14
+ json_string = json_format.MessageToJson(message)
15
+
16
+ # Parse a json format string to proto object.
17
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
18
+ """
19
+
20
+ __author__ = 'jieluo@google.com (Jie Luo)'
21
+
22
+
23
+ import base64
24
+ from collections import OrderedDict
25
+ import json
26
+ import math
27
+ from operator import methodcaller
28
+ import re
29
+ import warnings
30
+
31
+ from google.protobuf import descriptor
32
+ from google.protobuf import message_factory
33
+ from google.protobuf import symbol_database
34
+ from google.protobuf.internal import type_checkers
35
+
36
+
37
# cpp_type groupings consulted when converting field values to/from JSON.
_INT_TYPES = frozenset([
    descriptor.FieldDescriptor.CPPTYPE_INT32,
    descriptor.FieldDescriptor.CPPTYPE_UINT32,
    descriptor.FieldDescriptor.CPPTYPE_INT64,
    descriptor.FieldDescriptor.CPPTYPE_UINT64,
])
# 64-bit ints are emitted as JSON strings to avoid precision loss in
# consumers that parse all numbers as doubles (e.g. JavaScript).
_INT64_TYPES = frozenset([
    descriptor.FieldDescriptor.CPPTYPE_INT64,
    descriptor.FieldDescriptor.CPPTYPE_UINT64,
])
_FLOAT_TYPES = frozenset([
    descriptor.FieldDescriptor.CPPTYPE_FLOAT,
    descriptor.FieldDescriptor.CPPTYPE_DOUBLE,
])
# Special JSON spellings for non-finite float values.
_INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'

# Matches a high surrogate not followed by a low one, or a low surrogate
# not preceded by a high one (i.e. ill-formed UTF-16 inside a Python str).
_UNPAIRED_SURROGATE_PATTERN = re.compile(
    '[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
)

# Extension field keys in JSON look like "[package.ExtensionName]".
_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
60
+
61
+
62
class Error(Exception):
  """Top-level module error for json_format.

  Base class of every exception raised by this module.
  """
64
+
65
+
66
class SerializeToJsonError(Error):
  """Thrown if serialization to JSON fails (e.g. unmappable enum value)."""
68
+
69
+
70
class ParseError(Error):
  """Thrown in case of parsing error."""
72
+
73
+
74
class EnumStringValueParseError(ParseError):
  """Thrown if unknown string enum value is encountered.

  This exception is suppressed if ignore_unknown_fields is set.
  """
79
+
80
+
81
def MessageToJson(
    message,
    preserving_proto_field_name=False,
    indent=2,
    sort_keys=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
    ensure_ascii=True,
    always_print_fields_with_no_presence=False,
):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    indent: The JSON object will be pretty-printed with this indent level. An
      indent level of 0 or negative will only insert newlines. If the indent
      level is None, no newlines will be inserted.
    sort_keys: If True, then the output will be sorted by field names.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: Deprecated. If set, use this to specify float field valid
      digits.
    ensure_ascii: If True, strings with non-ASCII characters are escaped. If
      False, Unicode strings are returned unchanged.
    always_print_fields_with_no_presence: If True, fields without presence
      (implicit presence scalars, repeated fields, and map fields) will always
      be serialized. Any field that supports presence is not affected by this
      option (including singular message fields and oneof fields).

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  printer = _Printer(
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision,
      always_print_fields_with_no_presence,
  )
  return printer.ToJsonString(message, indent, sort_keys, ensure_ascii)
126
+
127
+
128
def MessageToDict(
    message,
    always_print_fields_with_no_presence=False,
    preserving_proto_field_name=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
):
  """Converts protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
  Nested messages become nested dicts; repeated fields become lists.

  Args:
    message: The protocol buffers message instance to serialize.
    always_print_fields_with_no_presence: If True, fields without presence
      (implicit presence scalars, repeated fields, and map fields) will always
      be serialized. Any field that supports presence is not affected by this
      option (including singular message fields and oneof fields).
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: Deprecated. If set, use this to specify float field valid
      digits.

  Returns:
    A dict representation of the protocol buffer message.
  """
  printer = _Printer(
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision,
      always_print_fields_with_no_presence,
  )
  # pylint: disable=protected-access
  return printer._MessageToJsonObject(message)
167
+
168
+
169
def _IsMapEntry(field):
  """True iff `field` is the synthesized entry message of a proto map field."""
  if field.type != descriptor.FieldDescriptor.TYPE_MESSAGE:
    return False
  entry_type = field.message_type
  return entry_type.has_options and entry_type.GetOptions().map_entry
175
+
176
+
177
+ class _Printer(object):
178
+ """JSON format printer for protocol message."""
179
+
180
  def __init__(
      self,
      preserving_proto_field_name=False,
      use_integers_for_enums=False,
      descriptor_pool=None,
      float_precision=None,
      always_print_fields_with_no_presence=False,
  ):
    """Initializes the printer; see MessageToJson for argument semantics."""
    self.always_print_fields_with_no_presence = (
        always_print_fields_with_no_presence
    )
    self.preserving_proto_field_name = preserving_proto_field_name
    self.use_integers_for_enums = use_integers_for_enums
    self.descriptor_pool = descriptor_pool
    if float_precision:
      warnings.warn(
          'float_precision option is deprecated for json_format. '
          'This will turn into error in 7.34.0, please remove it '
          'before that.'
      )
      # e.g. precision 8 becomes '.8g' for use with format().
      self.float_format = '.{}g'.format(float_precision)
    else:
      self.float_format = None
203
+
204
+ def ToJsonString(self, message, indent, sort_keys, ensure_ascii):
205
+ js = self._MessageToJsonObject(message)
206
+ return json.dumps(
207
+ js, indent=indent, sort_keys=sort_keys, ensure_ascii=ensure_ascii
208
+ )
209
+
210
  def _MessageToJsonObject(self, message):
    """Converts message to an object according to Proto3 JSON Specification."""
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      # Wrapper types (Int32Value etc.) serialize as their bare value.
      return self._WrapperMessageToJsonObject(message)
    if full_name in _WKTJSONMETHODS:
      # Well-known types dispatch to their dedicated *ToJsonObject method.
      return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
    js = {}
    return self._RegularMessageToJsonObject(message, js)
220
+
221
  def _RegularMessageToJsonObject(self, message, js):
    """Converts normal message according to Proto3 JSON Specification.

    Args:
      message: The message to convert.
      js: The dict to populate (may already hold entries, e.g. an Any's
        '@type' key).

    Returns:
      js, with one entry per set field (plus unset no-presence fields when
      always_print_fields_with_no_presence is enabled).

    Raises:
      SerializeToJsonError: If a field value cannot be serialized.
    """
    fields = message.ListFields()

    try:
      for field, value in fields:
        if self.preserving_proto_field_name:
          name = field.name
        else:
          name = field.json_name
        if _IsMapEntry(field):
          # Convert a map field.
          v_field = field.message_type.fields_by_name['value']
          js_map = {}
          for key in value:
            if isinstance(key, bool):
              # JSON map keys are strings; booleans spell as 'true'/'false'.
              if key:
                recorded_key = 'true'
              else:
                recorded_key = 'false'
            else:
              recorded_key = str(key)
            js_map[recorded_key] = self._FieldToJsonObject(v_field, value[key])
          js[name] = js_map
        elif field.is_repeated:
          # Convert a repeated field.
          js[name] = [self._FieldToJsonObject(field, k) for k in value]
        elif field.is_extension:
          # Extensions are keyed as '[full.field.name]'.
          name = '[%s]' % field.full_name
          js[name] = self._FieldToJsonObject(field, value)
        else:
          js[name] = self._FieldToJsonObject(field, value)

      # Serialize default value if including_default_value_fields is True.
      if (
          self.always_print_fields_with_no_presence
      ):
        message_descriptor = message.DESCRIPTOR
        for field in message_descriptor.fields:

          # always_print_fields_with_no_presence doesn't apply to
          # any field which supports presence.
          if self.always_print_fields_with_no_presence and field.has_presence:
            continue

          if self.preserving_proto_field_name:
            name = field.name
          else:
            name = field.json_name
          if name in js:
            # Skip the field which has been serialized already.
            continue
          if _IsMapEntry(field):
            js[name] = {}
          elif field.is_repeated:
            js[name] = []
          else:
            js[name] = self._FieldToJsonObject(field, field.default_value)

    except ValueError as e:
      raise SerializeToJsonError(
          'Failed to serialize {0} field: {1}.'.format(field.name, e)
      ) from e

    return js
286
+
287
  def _FieldToJsonObject(self, field, value):
    """Converts field value according to Proto3 JSON Specification."""
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      return self._MessageToJsonObject(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      if self.use_integers_for_enums:
        return value
      if field.enum_type.full_name == 'google.protobuf.NullValue':
        return None
      enum_value = field.enum_type.values_by_number.get(value, None)
      if enum_value is not None:
        return enum_value.name
      else:
        if field.enum_type.is_closed:
          # Closed enums cannot carry unknown numbers; open enums fall
          # through below and emit the raw integer instead.
          raise SerializeToJsonError(
              'Enum field contains an integer value '
              'which can not mapped to an enum value.'
          )
        else:
          return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # Use base64 Data encoding for bytes
        return base64.b64encode(value).decode('utf-8')
      else:
        return str(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return bool(value)
    elif field.cpp_type in _INT64_TYPES:
      # 64-bit ints are emitted as strings per the proto3 JSON spec.
      return str(value)
    elif field.cpp_type in _FLOAT_TYPES:
      if math.isinf(value):
        if value < 0.0:
          return _NEG_INFINITY
        else:
          return _INFINITY
      if math.isnan(value):
        return _NAN
      if self.float_format:
        # Deprecated float_precision path: round via format(), e.g. '.8g'.
        return float(format(value, self.float_format))
      elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
        # Emit the shortest decimal that round-trips at float32 precision.
        return type_checkers.ToShortestFloat(value)

    return value
331
+
332
  def _AnyMessageToJsonObject(self, message):
    """Converts Any message according to Proto3 JSON Specification."""
    if not message.ListFields():
      return {}
    # Must print @type first, use OrderedDict instead of {}
    js = OrderedDict()
    type_url = message.type_url
    js['@type'] = type_url
    # Unpack the serialized payload so it can be rendered inline.
    sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    sub_message.ParseFromString(message.value)
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      # Wrappers and other WKTs nest their JSON form under a 'value' key.
      js['value'] = self._WrapperMessageToJsonObject(sub_message)
      return js
    if full_name in _WKTJSONMETHODS:
      js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0], sub_message)(
          self
      )
      return js
    return self._RegularMessageToJsonObject(sub_message, js)
353
+
354
  def _GenericMessageToJsonObject(self, message):
    """Converts message according to Proto3 JSON Specification."""
    # Duration, Timestamp and FieldMask have a ToJsonString method to do the
    # conversion. Users can also call the method directly.
    return message.ToJsonString()
359
+
360
  def _ValueMessageToJsonObject(self, message):
    """Converts Value message according to Proto3 JSON Specification."""
    which = message.WhichOneof('kind')
    # If the Value message is not set treat as null_value when serialize
    # to JSON. The parse back result will be different from original message.
    if which is None or which == 'null_value':
      return None
    if which == 'list_value':
      return self._ListValueMessageToJsonObject(message.list_value)
    if which == 'number_value':
      value = message.number_value
      # Infinity/NaN have no JSON number form; a quoted string would
      # round-trip as string_value, so serialization must fail instead.
      if math.isinf(value):
        raise ValueError(
            'Fail to serialize Infinity for Value.number_value, '
            'which would parse as string_value'
        )
      if math.isnan(value):
        raise ValueError(
            'Fail to serialize NaN for Value.number_value, '
            'which would parse as string_value'
        )
    else:
      value = getattr(message, which)
    # Delegate to the scalar serializer using the oneof member's descriptor.
    oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
    return self._FieldToJsonObject(oneof_descriptor, value)
385
+
386
+ def _ListValueMessageToJsonObject(self, message):
387
+ """Converts ListValue message according to Proto3 JSON Specification."""
388
+ return [self._ValueMessageToJsonObject(value) for value in message.values]
389
+
390
+ def _StructMessageToJsonObject(self, message):
391
+ """Converts Struct message according to Proto3 JSON Specification."""
392
+ fields = message.fields
393
+ ret = {}
394
+ for key in fields:
395
+ ret[key] = self._ValueMessageToJsonObject(fields[key])
396
+ return ret
397
+
398
+ def _WrapperMessageToJsonObject(self, message):
399
+ return self._FieldToJsonObject(
400
+ message.DESCRIPTOR.fields_by_name['value'], message.value
401
+ )
402
+
403
+
404
+ def _IsWrapperMessage(message_descriptor):
405
+ return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
406
+
407
+
408
+ def _DuplicateChecker(js):
409
+ result = {}
410
+ for name, value in js:
411
+ if name in result:
412
+ raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
413
+ result[name] = value
414
+ return result
415
+
416
+
417
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
  """Instantiates an empty message of the type named by a type URL.

  Args:
    type_url: URL such as 'type.googleapis.com/pkg.Message'; only the part
      after the final '/' is used as the message full name.
    descriptor_pool: Pool used to resolve the type, or None for the default.

  Returns:
    A new, empty message instance of the resolved type.

  Raises:
    TypeError: If the type cannot be found in the pool.
  """
  if descriptor_pool is None:
    pool = symbol_database.Default().pool
  else:
    pool = descriptor_pool
  type_name = type_url.split('/')[-1]
  try:
    msg_descriptor = pool.FindMessageTypeByName(type_name)
  except KeyError as e:
    raise TypeError(
        'Can not find message descriptor by type_url: {0}'.format(type_url)
    ) from e
  return message_factory.GetMessageClass(msg_descriptor)()
430
+
431
+
432
def Parse(
    text,
    message,
    ignore_unknown_fields=False,
    descriptor_pool=None,
    max_recursion_depth=100,
):
  """Parses a JSON representation of a protocol message into a message.

  Args:
    text: Message JSON representation.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be deserialized.
      JSON messages over this depth will fail to be deserialized. Default value
      is 100.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On JSON parsing problems.
  """
  if not isinstance(text, str):
    text = text.decode('utf-8')

  try:
    # _DuplicateChecker rejects JSON objects that repeat a key.
    js = json.loads(text, object_pairs_hook=_DuplicateChecker)
  except Exception as e:
    raise ParseError('Failed to load JSON: {0}.'.format(str(e))) from e

  try:
    return ParseDict(
        js, message, ignore_unknown_fields, descriptor_pool, max_recursion_depth
    )
  except ParseError as e:
    # ParseError already carries field-path context; re-raise unchanged.
    raise e
  except Exception as e:
    # Wrap any other failure so callers only need to catch ParseError.
    raise ParseError(
        'Failed to parse JSON: {0}: {1}.'.format(type(e).__name__, str(e))
    ) from e
475
+
476
+
477
def ParseDict(
    js_dict,
    message,
    ignore_unknown_fields=False,
    descriptor_pool=None,
    max_recursion_depth=100,
):
  """Merges a JSON-compatible dict into a protocol message.

  Args:
    js_dict: Dict representation of a JSON message.
    message: A protocol buffer message to merge into.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be deserialized.
      JSON messages over this depth will fail to be deserialized. Default value
      is 100.

  Returns:
    The same message passed as argument.
  """
  # A fresh parser per call keeps recursion-depth state isolated; the empty
  # path makes error messages start at the message's own name.
  converter = _Parser(
      ignore_unknown_fields, descriptor_pool, max_recursion_depth
  )
  converter.ConvertMessage(js_dict, message, '')
  return message
502
+
503
+
504
# JSON scalar number types accepted for Value.number_value.
_INT_OR_FLOAT = (int, float)
# JSON array representations accepted for repeated fields and ListValue.
_LIST_LIKE = (list, tuple)
506
+
507
+
508
class _Parser(object):
  """JSON format parser for protocol message.

  Stateful helper behind Parse()/ParseDict(): it walks a decoded JSON
  structure and merges it into a protocol message, dispatching well-known
  types to their special-format converters.
  """

  def __init__(
      self, ignore_unknown_fields, descriptor_pool, max_recursion_depth
  ):
    # When True, unknown JSON field names (and unknown enum string values)
    # are skipped instead of raising ParseError.
    self.ignore_unknown_fields = ignore_unknown_fields
    # Pool used to resolve Any type URLs; None means the default pool.
    self.descriptor_pool = descriptor_pool
    # Nesting limit guarding against stack exhaustion on hostile input.
    self.max_recursion_depth = max_recursion_depth
    self.recursion_depth = 0

  def ConvertMessage(self, value, message, path):
    """Convert a JSON object into a message.

    Args:
      value: A JSON object.
      message: A WKT or regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    self.recursion_depth += 1
    if self.recursion_depth > self.max_recursion_depth:
      raise ParseError(
          'Message too deep. Max recursion depth is {0}'.format(
              self.max_recursion_depth
          )
      )
    message_descriptor = message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if not path:
      path = message_descriptor.name
    # Wrapper types and other WKTs have special JSON forms; everything else
    # is a plain field-by-field conversion.
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(value, message, path)
    elif full_name in _WKTJSONMETHODS:
      methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
    else:
      self._ConvertFieldValuePair(value, message, path)
    self.recursion_depth -= 1

  def _ConvertFieldValuePair(self, js, message, path):
    """Convert field value pairs into regular message.

    Args:
      js: A JSON object to convert the field value pairs.
      message: A regular protocol message to record the data.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of problems converting.
    """
    names = []
    message_descriptor = message.DESCRIPTOR
    # JSON names (lowerCamelCase) are tried first, then proto field names.
    fields_by_json_name = dict(
        (f.json_name, f) for f in message_descriptor.fields
    )
    for name in js:
      try:
        field = fields_by_json_name.get(name, None)
        if not field:
          field = message_descriptor.fields_by_name.get(name, None)
        if not field and _VALID_EXTENSION_NAME.match(name):
          if not message_descriptor.is_extendable:
            raise ParseError(
                'Message type {0} does not have extensions at {1}'.format(
                    message_descriptor.full_name, path
                )
            )
          identifier = name[1:-1]  # strip [] brackets
          # pylint: disable=protected-access
          field = message.Extensions._FindExtensionByName(identifier)
          # pylint: enable=protected-access
          if not field:
            # Try looking for extension by the message type name, dropping the
            # field name following the final . separator in full_name.
            identifier = '.'.join(identifier.split('.')[:-1])
            # pylint: disable=protected-access
            field = message.Extensions._FindExtensionByName(identifier)
            # pylint: enable=protected-access
        if not field:
          if self.ignore_unknown_fields:
            continue
          raise ParseError(
              (
                  'Message type "{0}" has no field named "{1}" at "{2}".\n'
                  ' Available Fields(except extensions): "{3}"'
              ).format(
                  message_descriptor.full_name,
                  name,
                  path,
                  [f.json_name for f in message_descriptor.fields],
              )
          )
        # Reject the same field supplied under both its JSON and proto name.
        if name in names:
          raise ParseError(
              'Message type "{0}" should not have multiple '
              '"{1}" fields at "{2}".'.format(
                  message.DESCRIPTOR.full_name, name, path
              )
          )
        names.append(name)
        value = js[name]
        # Check no other oneof field is parsed.
        if field.containing_oneof is not None and value is not None:
          oneof_name = field.containing_oneof.name
          if oneof_name in names:
            raise ParseError(
                'Message type "{0}" should not have multiple '
                '"{1}" oneof fields at "{2}".'.format(
                    message.DESCRIPTOR.full_name, oneof_name, path
                )
            )
          names.append(oneof_name)

        if value is None:
          # JSON null normally clears the field, except for the two types
          # that can represent null explicitly (Value and NullValue).
          if (
              field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
              and field.message_type.full_name == 'google.protobuf.Value'
          ):
            sub_message = getattr(message, field.name)
            sub_message.null_value = 0
          elif (
              field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
              and field.enum_type.full_name == 'google.protobuf.NullValue'
          ):
            setattr(message, field.name, 0)
          else:
            message.ClearField(field.name)
          continue

        # Parse field value.
        if _IsMapEntry(field):
          message.ClearField(field.name)
          self._ConvertMapFieldValue(
              value, message, field, '{0}.{1}'.format(path, name)
          )
        elif field.is_repeated:
          message.ClearField(field.name)
          if not isinstance(value, _LIST_LIKE):
            raise ParseError(
                'repeated field {0} must be in [] which is {1} at {2}'.format(
                    name, value, path
                )
            )
          if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
            # Repeated message field.
            for index, item in enumerate(value):
              sub_message = getattr(message, field.name).add()
              # None is a null_value in Value.
              if (
                  item is None
                  and sub_message.DESCRIPTOR.full_name
                  != 'google.protobuf.Value'
              ):
                raise ParseError(
                    'null is not allowed to be used as an element'
                    ' in a repeated field at {0}.{1}[{2}]'.format(
                        path, name, index
                    )
                )
              self.ConvertMessage(
                  item, sub_message, '{0}.{1}[{2}]'.format(path, name, index)
              )
          else:
            # Repeated scalar field.
            for index, item in enumerate(value):
              if item is None:
                raise ParseError(
                    'null is not allowed to be used as an element'
                    ' in a repeated field at {0}.{1}[{2}]'.format(
                        path, name, index
                    )
                )
              self._ConvertAndAppendScalar(
                  message, field, item, '{0}.{1}[{2}]'.format(path, name, index)
              )
        elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
          if field.is_extension:
            sub_message = message.Extensions[field]
          else:
            sub_message = getattr(message, field.name)
          # Mark the submessage present even if the JSON object is empty.
          sub_message.SetInParent()
          self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name))
        else:
          if field.is_extension:
            self._ConvertAndSetScalarExtension(
                message, field, value, '{0}.{1}'.format(path, name)
            )
          else:
            self._ConvertAndSetScalar(
                message, field, value, '{0}.{1}'.format(path, name)
            )
      except ParseError as e:
        if field and field.containing_oneof is None:
          raise ParseError(
              'Failed to parse {0} field: {1}.'.format(name, e)
          ) from e
        else:
          raise ParseError(str(e)) from e
      except ValueError as e:
        raise ParseError(
            'Failed to parse {0} field: {1}.'.format(name, e)
        ) from e
      except TypeError as e:
        raise ParseError(
            'Failed to parse {0} field: {1}.'.format(name, e)
        ) from e

  def _ConvertAnyMessage(self, value, message, path):
    """Convert a JSON representation into Any message."""
    # An empty JSON object maps to an unset Any.
    if isinstance(value, dict) and not value:
      return
    try:
      type_url = value['@type']
    except KeyError as e:
      raise ParseError(
          '@type is missing when parsing any message at {0}'.format(path)
      ) from e

    try:
      sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
    except TypeError as e:
      raise ParseError('{0} at {1}'.format(e, path)) from e
    message_descriptor = sub_message.DESCRIPTOR
    full_name = message_descriptor.full_name
    if _IsWrapperMessage(message_descriptor):
      self._ConvertWrapperMessage(
          value['value'], sub_message, '{0}.value'.format(path)
      )
    elif full_name in _WKTJSONMETHODS:
      methodcaller(
          _WKTJSONMETHODS[full_name][1],
          value['value'],
          sub_message,
          '{0}.value'.format(path),
      )(self)
    else:
      # Temporarily drop @type so the remaining keys parse as normal fields;
      # restore it even on failure since 'value' belongs to the caller.
      del value['@type']
      try:
        self._ConvertFieldValuePair(value, sub_message, path)
      finally:
        value['@type'] = type_url
    # Sets Any message
    message.value = sub_message.SerializeToString()
    message.type_url = type_url

  def _ConvertGenericMessage(self, value, message, path):
    """Convert a JSON representation into message with FromJsonString."""
    # Duration, Timestamp, FieldMask have a FromJsonString method to do the
    # conversion. Users can also call the method directly.
    try:
      message.FromJsonString(value)
    except ValueError as e:
      raise ParseError('{0} at {1}'.format(e, path)) from e

  def _ConvertValueMessage(self, value, message, path):
    """Convert a JSON representation into Value message."""
    # NOTE: bool must be tested before _INT_OR_FLOAT because Python's bool
    # is a subclass of int.
    if isinstance(value, dict):
      self._ConvertStructMessage(value, message.struct_value, path)
    elif isinstance(value, _LIST_LIKE):
      self._ConvertListOrTupleValueMessage(value, message.list_value, path)
    elif value is None:
      message.null_value = 0
    elif isinstance(value, bool):
      message.bool_value = value
    elif isinstance(value, str):
      message.string_value = value
    elif isinstance(value, _INT_OR_FLOAT):
      message.number_value = value
    else:
      raise ParseError(
          'Value {0} has unexpected type {1} at {2}'.format(
              value, type(value), path
          )
      )

  def _ConvertListOrTupleValueMessage(self, value, message, path):
    """Convert a JSON representation into ListValue message."""
    if not isinstance(value, _LIST_LIKE):
      raise ParseError(
          'ListValue must be in [] which is {0} at {1}'.format(value, path)
      )
    message.ClearField('values')
    for index, item in enumerate(value):
      self._ConvertValueMessage(
          item, message.values.add(), '{0}[{1}]'.format(path, index)
      )

  def _ConvertStructMessage(self, value, message, path):
    """Convert a JSON representation into Struct message."""
    if not isinstance(value, dict):
      raise ParseError(
          'Struct must be in a dict which is {0} at {1}'.format(value, path)
      )
    # Clear will mark the struct as modified so it will be created even if
    # there are no values.
    message.Clear()
    for key in value:
      self._ConvertValueMessage(
          value[key], message.fields[key], '{0}.{1}'.format(path, key)
      )
    return

  def _ConvertWrapperMessage(self, value, message, path):
    """Convert a JSON representation into Wrapper message."""
    field = message.DESCRIPTOR.fields_by_name['value']
    self._ConvertAndSetScalar(
        message, field, value, path='{0}.value'.format(path)
    )

  def _ConvertMapFieldValue(self, value, message, field, path):
    """Convert map field value for a message map field.

    Args:
      value: A JSON object to convert the map field value.
      message: A protocol message to record the converted data.
      field: The descriptor of the map field to be converted.
      path: parent path to log parse error info.

    Raises:
      ParseError: In case of convert problems.
    """
    if not isinstance(value, dict):
      raise ParseError(
          'Map field {0} must be in a dict which is {1} at {2}'.format(
              field.name, value, path
          )
      )
    key_field = field.message_type.fields_by_name['key']
    value_field = field.message_type.fields_by_name['value']
    for key in value:
      # Map keys are always JSON strings; require_str=True enforces that.
      key_value = _ConvertScalarFieldValue(
          key, key_field, '{0}.key'.format(path), True
      )
      if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        self.ConvertMessage(
            value[key],
            getattr(message, field.name)[key_value],
            '{0}[{1}]'.format(path, key_value),
        )
      else:
        self._ConvertAndSetScalarToMapKey(
            message,
            field,
            key_value,
            value[key],
            path='{0}[{1}]'.format(path, key_value),
        )

  def _ConvertAndSetScalarExtension(
      self, message, extension_field, js_value, path
  ):
    """Convert scalar from js_value and assign it to message.Extensions[extension_field]."""
    try:
      message.Extensions[extension_field] = _ConvertScalarFieldValue(
          js_value, extension_field, path
      )
    except EnumStringValueParseError:
      # Unknown enum names are tolerated only with ignore_unknown_fields.
      if not self.ignore_unknown_fields:
        raise

  def _ConvertAndSetScalar(self, message, field, js_value, path):
    """Convert scalar from js_value and assign it to message.field."""
    try:
      setattr(
          message, field.name, _ConvertScalarFieldValue(js_value, field, path)
      )
    except EnumStringValueParseError:
      if not self.ignore_unknown_fields:
        raise

  def _ConvertAndAppendScalar(self, message, repeated_field, js_value, path):
    """Convert scalar from js_value and append it to message.repeated_field."""
    try:
      getattr(message, repeated_field.name).append(
          _ConvertScalarFieldValue(js_value, repeated_field, path)
      )
    except EnumStringValueParseError:
      if not self.ignore_unknown_fields:
        raise

  def _ConvertAndSetScalarToMapKey(
      self, message, map_field, converted_key, js_value, path
  ):
    """Convert scalar from 'js_value' and add it to message.map_field[converted_key]."""
    try:
      getattr(message, map_field.name)[converted_key] = (
          _ConvertScalarFieldValue(
              js_value,
              map_field.message_type.fields_by_name['value'],
              path,
          )
      )
    except EnumStringValueParseError:
      if not self.ignore_unknown_fields:
        raise
905
+
906
+
907
def _ConvertScalarFieldValue(value, field, path, require_str=False):
  """Convert a single scalar field value.

  Args:
    value: A scalar value to convert the scalar field value.
    field: The descriptor of the field to convert.
    path: parent path to log parse error info.
    require_str: If True, the field value must be a str (used for map keys,
      which JSON always encodes as strings).

  Returns:
    The converted scalar field value

  Raises:
    ParseError: In case of convert problems.
    EnumStringValueParseError: In case of unknown enum string value.
  """
  try:
    if field.cpp_type in _INT_TYPES:
      return _ConvertInteger(value)
    elif field.cpp_type in _FLOAT_TYPES:
      return _ConvertFloat(value, field)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      return _ConvertBool(value, require_str)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        if isinstance(value, str):
          encoded = value.encode('utf-8')
        else:
          encoded = value
        # Add extra padding '='
        padded_value = encoded + b'=' * (4 - len(encoded) % 4)
        # urlsafe decoding accepts both the standard and URL-safe alphabets.
        return base64.urlsafe_b64decode(padded_value)
      else:
        # Checking for unpaired surrogates appears to be unreliable,
        # depending on the specific Python version, so we check manually.
        if _UNPAIRED_SURROGATE_PATTERN.search(value):
          raise ParseError('Unpaired surrogate')
        return value
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      # Convert an enum value: first by name, then by number.
      enum_value = field.enum_type.values_by_name.get(value, None)
      if enum_value is None:
        try:
          number = int(value)
          enum_value = field.enum_type.values_by_number.get(number, None)
        except ValueError as e:
          # Since parsing to integer failed and lookup in values_by_name didn't
          # find this name, we have an enum string value which is unknown.
          raise EnumStringValueParseError(
              'Invalid enum value {0} for enum type {1}'.format(
                  value, field.enum_type.full_name
              )
          ) from e
        if enum_value is None:
          if field.enum_type.is_closed:
            # Closed enums reject unknown numbers ...
            raise ParseError(
                'Invalid enum value {0} for enum type {1}'.format(
                    value, field.enum_type.full_name
                )
            )
          else:
            # ... while open enums keep the raw number.
            return number
      return enum_value.number
  except EnumStringValueParseError as e:
    raise EnumStringValueParseError('{0} at {1}'.format(e, path)) from e
  except ParseError as e:
    # Re-wrap to append the field path for better diagnostics.
    raise ParseError('{0} at {1}'.format(e, path)) from e
974
+
975
+
976
+ def _ConvertInteger(value):
977
+ """Convert an integer.
978
+
979
+ Args:
980
+ value: A scalar value to convert.
981
+
982
+ Returns:
983
+ The integer value.
984
+
985
+ Raises:
986
+ ParseError: If an integer couldn't be consumed.
987
+ """
988
+ if isinstance(value, float) and not value.is_integer():
989
+ raise ParseError("Couldn't parse integer: {0}".format(value))
990
+
991
+ if isinstance(value, str) and value.find(' ') != -1:
992
+ raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))
993
+
994
+ if isinstance(value, bool):
995
+ raise ParseError(
996
+ 'Bool value {0} is not acceptable for integer field'.format(value)
997
+ )
998
+
999
+ try:
1000
+ return int(value)
1001
+ except ValueError as e:
1002
+ # Attempt to parse as an integer-valued float.
1003
+ try:
1004
+ f = float(value)
1005
+ except ValueError:
1006
+ # Raise the original exception for the int parse.
1007
+ raise e # pylint: disable=raise-missing-from
1008
+ if not f.is_integer():
1009
+ raise ParseError(
1010
+ 'Couldn\'t parse non-integer string: "{0}"'.format(value)
1011
+ ) from e
1012
+ return int(f)
1013
+
1014
+
1015
+ def _ConvertFloat(value, field):
1016
+ """Convert an floating point number."""
1017
+ if isinstance(value, float):
1018
+ if math.isnan(value):
1019
+ raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
1020
+ if math.isinf(value):
1021
+ if value > 0:
1022
+ raise ParseError(
1023
+ "Couldn't parse Infinity or value too large, "
1024
+ 'use quoted "Infinity" instead'
1025
+ )
1026
+ else:
1027
+ raise ParseError(
1028
+ "Couldn't parse -Infinity or value too small, "
1029
+ 'use quoted "-Infinity" instead'
1030
+ )
1031
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
1032
+ # pylint: disable=protected-access
1033
+ if value > type_checkers._FLOAT_MAX:
1034
+ raise ParseError('Float value too large')
1035
+ # pylint: disable=protected-access
1036
+ if value < type_checkers._FLOAT_MIN:
1037
+ raise ParseError('Float value too small')
1038
+ if value == 'nan':
1039
+ raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
1040
+ try:
1041
+ # Assume Python compatible syntax.
1042
+ return float(value)
1043
+ except ValueError as e:
1044
+ # Check alternative spellings.
1045
+ if value == _NEG_INFINITY:
1046
+ return float('-inf')
1047
+ elif value == _INFINITY:
1048
+ return float('inf')
1049
+ elif value == _NAN:
1050
+ return float('nan')
1051
+ else:
1052
+ raise ParseError("Couldn't parse float: {0}".format(value)) from e
1053
+
1054
+
1055
+ def _ConvertBool(value, require_str):
1056
+ """Convert a boolean value.
1057
+
1058
+ Args:
1059
+ value: A scalar value to convert.
1060
+ require_str: If True, value must be a str.
1061
+
1062
+ Returns:
1063
+ The bool parsed.
1064
+
1065
+ Raises:
1066
+ ParseError: If a boolean value couldn't be consumed.
1067
+ """
1068
+ if require_str:
1069
+ if value == 'true':
1070
+ return True
1071
+ elif value == 'false':
1072
+ return False
1073
+ else:
1074
+ raise ParseError('Expected "true" or "false", not {0}'.format(value))
1075
+
1076
+ if not isinstance(value, bool):
1077
+ raise ParseError('Expected true or false without quotes')
1078
+ return value
1079
+
1080
+
1081
# Dispatch table for well-known types with special Proto3 JSON forms.
# Maps a message full name to [printer_method_name, parser_method_name];
# the names are invoked on _Printer/_Parser via operator.methodcaller.
_WKTJSONMETHODS = {
    'google.protobuf.Any': ['_AnyMessageToJsonObject', '_ConvertAnyMessage'],
    'google.protobuf.Duration': [
        '_GenericMessageToJsonObject',
        '_ConvertGenericMessage',
    ],
    'google.protobuf.FieldMask': [
        '_GenericMessageToJsonObject',
        '_ConvertGenericMessage',
    ],
    'google.protobuf.ListValue': [
        '_ListValueMessageToJsonObject',
        '_ConvertListOrTupleValueMessage',
    ],
    'google.protobuf.Struct': [
        '_StructMessageToJsonObject',
        '_ConvertStructMessage',
    ],
    'google.protobuf.Timestamp': [
        '_GenericMessageToJsonObject',
        '_ConvertGenericMessage',
    ],
    'google.protobuf.Value': [
        '_ValueMessageToJsonObject',
        '_ConvertValueMessage',
    ],
}
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/message.py ADDED
@@ -0,0 +1,448 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # TODO: We should just make these methods all "pure-virtual" and move
9
+ # all implementation out, into reflection.py for now.
10
+
11
+
12
+ """Contains an abstract base class for protocol messages."""
13
+
14
__author__ = 'robinson@google.com (Will Robinson)'

# Attribute names that are not consistently exposed by every Python proto
# implementation; Message.__dir__ filters these out when the runtime does
# not actually provide them.
_INCONSISTENT_MESSAGE_ATTRIBUTES = ('Extensions',)
17
+
18
+
19
class Error(Exception):
  """Base error type for this module; all module exceptions derive from it."""
22
+
23
+
24
class DecodeError(Error):
  """Exception raised when deserializing (parsing) messages fails."""
27
+
28
+
29
class EncodeError(Error):
  """Exception raised when serializing messages fails."""
32
+
33
+
34
+ class Message(object):
35
+
36
+ """Abstract base class for protocol messages.
37
+
38
+ Protocol message classes are almost always generated by the protocol
39
+ compiler. These generated types subclass Message and implement the methods
40
+ shown below.
41
+ """
42
+
43
+ # TODO: Link to an HTML document here.
44
+
45
+ # TODO: Document that instances of this class will also
46
+ # have an Extensions attribute with __getitem__ and __setitem__.
47
+ # Again, not sure how to best convey this.
48
+
49
+ # TODO: Document these fields and methods.
50
+
51
+ __slots__ = []
52
+
53
+ #: The :class:`google.protobuf.Descriptor`
54
+ # for this message type.
55
+ DESCRIPTOR = None
56
+
57
+ def __deepcopy__(self, memo=None):
58
+ clone = type(self)()
59
+ clone.MergeFrom(self)
60
+ return clone
61
+
62
+ def __dir__(self):
63
+ """Provides the list of all accessible Message attributes."""
64
+ message_attributes = set(super().__dir__())
65
+
66
+ # TODO: Remove this once the UPB implementation is improved.
67
+ # The UPB proto implementation currently doesn't provide proto fields as
68
+ # attributes and they have to added.
69
+ if self.DESCRIPTOR is not None:
70
+ for field in self.DESCRIPTOR.fields:
71
+ message_attributes.add(field.name)
72
+
73
+ # The Fast C++ proto implementation provides inaccessible attributes that
74
+ # have to be removed.
75
+ for attribute in _INCONSISTENT_MESSAGE_ATTRIBUTES:
76
+ if attribute not in message_attributes:
77
+ continue
78
+ try:
79
+ getattr(self, attribute)
80
+ except AttributeError:
81
+ message_attributes.remove(attribute)
82
+
83
+ return sorted(message_attributes)
84
+
85
+ def __eq__(self, other_msg):
86
+ """Recursively compares two messages by value and structure."""
87
+ raise NotImplementedError
88
+
89
+ def __ne__(self, other_msg):
90
+ # Can't just say self != other_msg, since that would infinitely recurse. :)
91
+ return not self == other_msg
92
+
93
+ def __hash__(self):
94
+ raise TypeError('unhashable object')
95
+
96
+ def __str__(self):
97
+ """Outputs a human-readable representation of the message."""
98
+ raise NotImplementedError
99
+
100
+ def __unicode__(self):
101
+ """Outputs a human-readable representation of the message."""
102
+ raise NotImplementedError
103
+
104
+ def __contains__(self, field_name_or_key):
105
+ """Checks if a certain field is set for the message.
106
+
107
+ Has presence fields return true if the field is set, false if the field is
108
+ not set. Fields without presence do raise `ValueError` (this includes
109
+ repeated fields, map fields, and implicit presence fields).
110
+
111
+ If field_name is not defined in the message descriptor, `ValueError` will
112
+ be raised.
113
+ Note: WKT Struct checks if the key is contained in fields. ListValue checks
114
+ if the item is contained in the list.
115
+
116
+ Args:
117
+ field_name_or_key: For Struct, the key (str) of the fields map. For
118
+ ListValue, any type that may be contained in the list. For other
119
+ messages, name of the field (str) to check for presence.
120
+
121
+ Returns:
122
+ bool: For Struct, whether the item is contained in fields. For ListValue,
123
+ whether the item is contained in the list. For other message,
124
+ whether a value has been set for the named field.
125
+
126
+ Raises:
127
+ ValueError: For normal messages, if the `field_name_or_key` is not a
128
+ member of this message or `field_name_or_key` is not a string.
129
+ """
130
+ raise NotImplementedError
131
+
132
+ def MergeFrom(self, other_msg):
133
+ """Merges the contents of the specified message into current message.
134
+
135
+ This method merges the contents of the specified message into the current
136
+ message. Singular fields that are set in the specified message overwrite
137
+ the corresponding fields in the current message. Repeated fields are
138
+ appended. Singular sub-messages and groups are recursively merged.
139
+
140
+ Args:
141
+ other_msg (Message): A message to merge into the current message.
142
+ """
143
+ raise NotImplementedError
144
+
145
+ def CopyFrom(self, other_msg):
146
+ """Copies the content of the specified message into the current message.
147
+
148
+ The method clears the current message and then merges the specified
149
+ message using MergeFrom.
150
+
151
+ Args:
152
+ other_msg (Message): A message to copy into the current one.
153
+ """
154
+ if self is other_msg:
155
+ return
156
+ self.Clear()
157
+ self.MergeFrom(other_msg)
158
+
159
+ def Clear(self):
160
+ """Clears all data that was set in the message."""
161
+ raise NotImplementedError
162
+
163
+ def SetInParent(self):
164
+ """Mark this as present in the parent.
165
+
166
+ This normally happens automatically when you assign a field of a
167
+ sub-message, but sometimes you want to make the sub-message
168
+ present while keeping it empty. If you find yourself using this,
169
+ you may want to reconsider your design.
170
+ """
171
+ raise NotImplementedError
172
+
173
+ def IsInitialized(self):
174
+ """Checks if the message is initialized.
175
+
176
+ Returns:
177
+ bool: The method returns True if the message is initialized (i.e. all of
178
+ its required fields are set).
179
+ """
180
+ raise NotImplementedError
181
+
182
+ # TODO: MergeFromString() should probably return None and be
183
+ # implemented in terms of a helper that returns the # of bytes read. Our
184
+ # deserialization routines would use the helper when recursively
185
+ # deserializing, but the end user would almost always just want the no-return
186
+ # MergeFromString().
187
+
188
+ def MergeFromString(self, serialized):
189
+ """Merges serialized protocol buffer data into this message.
190
+
191
+ When we find a field in `serialized` that is already present
192
+ in this message:
193
+
194
+ - If it's a "repeated" field, we append to the end of our list.
195
+ - Else, if it's a scalar, we overwrite our field.
196
+ - Else, (it's a nonrepeated composite), we recursively merge
197
+ into the existing composite.
198
+
199
+ Args:
200
+ serialized (bytes): Any object that allows us to call
201
+ ``memoryview(serialized)`` to access a string of bytes using the
202
+ buffer interface.
203
+
204
+ Returns:
205
+ int: The number of bytes read from `serialized`.
206
+ For non-group messages, this will always be `len(serialized)`,
207
+ but for messages which are actually groups, this will
208
+ generally be less than `len(serialized)`, since we must
209
+ stop when we reach an ``END_GROUP`` tag. Note that if
210
+ we *do* stop because of an ``END_GROUP`` tag, the number
211
+ of bytes returned does not include the bytes
212
+ for the ``END_GROUP`` tag information.
213
+
214
+ Raises:
215
+ DecodeError: if the input cannot be parsed.
216
+ """
217
+ # TODO: Document handling of unknown fields.
218
+ # TODO: When we switch to a helper, this will return None.
219
+ raise NotImplementedError
220
+
221
+ def ParseFromString(self, serialized):
222
+ """Parse serialized protocol buffer data in binary form into this message.
223
+
224
+ Like :func:`MergeFromString()`, except we clear the object first.
225
+
226
+ Raises:
227
+ message.DecodeError if the input cannot be parsed.
228
+ """
229
+ self.Clear()
230
+ return self.MergeFromString(serialized)
231
+
232
+ def SerializeToString(self, **kwargs):
233
+ """Serializes the protocol message to a binary string.
234
+
235
+ Keyword Args:
236
+ deterministic (bool): If true, requests deterministic serialization
237
+ of the protobuf, with predictable ordering of map keys.
238
+
239
+ Returns:
240
+ A binary string representation of the message if all of the required
241
+ fields in the message are set (i.e. the message is initialized).
242
+
243
+ Raises:
244
+ EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
245
+ """
246
+ raise NotImplementedError
247
+
248
+ def SerializePartialToString(self, **kwargs):
249
+ """Serializes the protocol message to a binary string.
250
+
251
+ This method is similar to SerializeToString but doesn't check if the
252
+ message is initialized.
253
+
254
+ Keyword Args:
255
+ deterministic (bool): If true, requests deterministic serialization
256
+ of the protobuf, with predictable ordering of map keys.
257
+
258
+ Returns:
259
+ bytes: A serialized representation of the partial message.
260
+ """
261
+ raise NotImplementedError
262
+
263
+ # TODO: Decide whether we like these better
264
+ # than auto-generated has_foo() and clear_foo() methods
265
+ # on the instances themselves. This way is less consistent
266
+ # with C++, but it makes reflection-type access easier and
267
+ # reduces the number of magically autogenerated things.
268
+ #
269
+ # TODO: Be sure to document (and test) exactly
270
+ # which field names are accepted here. Are we case-sensitive?
271
+ # What do we do with fields that share names with Python keywords
272
+ # like 'lambda' and 'yield'?
273
+ #
274
+ # nnorwitz says:
275
+ # """
276
+ # Typically (in python), an underscore is appended to names that are
277
+ # keywords. So they would become lambda_ or yield_.
278
+ # """
279
+ def ListFields(self):
280
+ """Returns a list of (FieldDescriptor, value) tuples for present fields.
281
+
282
+ A message field is non-empty if HasField() would return true. A singular
283
+ primitive field is non-empty if HasField() would return true in proto2 or it
284
+ is non zero in proto3. A repeated field is non-empty if it contains at least
285
+ one element. The fields are ordered by field number.
286
+
287
+ Returns:
288
+ list[tuple(FieldDescriptor, value)]: field descriptors and values
289
+ for all fields in the message which are not empty. The values vary by
290
+ field type.
291
+ """
292
+ raise NotImplementedError
293
+
294
+ def HasField(self, field_name):
295
+ """Checks if a certain field is set for the message.
296
+
297
+ For a oneof group, checks if any field inside is set. Note that if the
298
+ field_name is not defined in the message descriptor, :exc:`ValueError` will
299
+ be raised.
300
+
301
+ Args:
302
+ field_name (str): The name of the field to check for presence.
303
+
304
+ Returns:
305
+ bool: Whether a value has been set for the named field.
306
+
307
+ Raises:
308
+ ValueError: if the `field_name` is not a member of this message.
309
+ """
310
+ raise NotImplementedError
311
+
312
+ def ClearField(self, field_name):
313
+ """Clears the contents of a given field.
314
+
315
+ Inside a oneof group, clears the field set. If the name neither refers to a
316
+ defined field or oneof group, :exc:`ValueError` is raised.
317
+
318
+ Args:
319
+ field_name (str): The name of the field to check for presence.
320
+
321
+ Raises:
322
+ ValueError: if the `field_name` is not a member of this message.
323
+ """
324
+ raise NotImplementedError
325
+
326
+ def WhichOneof(self, oneof_group):
327
+ """Returns the name of the field that is set inside a oneof group.
328
+
329
+ If no field is set, returns None.
330
+
331
+ Args:
332
+ oneof_group (str): the name of the oneof group to check.
333
+
334
+ Returns:
335
+ str or None: The name of the group that is set, or None.
336
+
337
+ Raises:
338
+ ValueError: no group with the given name exists
339
+ """
340
+ raise NotImplementedError
341
+
342
+ def HasExtension(self, field_descriptor):
343
+ """Checks if a certain extension is present for this message.
344
+
345
+ Extensions are retrieved using the :attr:`Extensions` mapping (if present).
346
+
347
+ Args:
348
+ field_descriptor: The field descriptor for the extension to check.
349
+
350
+ Returns:
351
+ bool: Whether the extension is present for this message.
352
+
353
+ Raises:
354
+ KeyError: if the extension is repeated. Similar to repeated fields,
355
+ there is no separate notion of presence: a "not present" repeated
356
+ extension is an empty list.
357
+ """
358
+ raise NotImplementedError
359
+
360
+ def ClearExtension(self, field_descriptor):
361
+ """Clears the contents of a given extension.
362
+
363
+ Args:
364
+ field_descriptor: The field descriptor for the extension to clear.
365
+ """
366
+ raise NotImplementedError
367
+
368
+ def UnknownFields(self):
369
+ """Returns the UnknownFieldSet.
370
+
371
+ Returns:
372
+ UnknownFieldSet: The unknown fields stored in this message.
373
+ """
374
+ raise NotImplementedError
375
+
376
+ def DiscardUnknownFields(self):
377
+ """Clears all fields in the :class:`UnknownFieldSet`.
378
+
379
+ This operation is recursive for nested message.
380
+ """
381
+ raise NotImplementedError
382
+
383
+ def ByteSize(self):
384
+ """Returns the serialized size of this message.
385
+
386
+ Recursively calls ByteSize() on all contained messages.
387
+
388
+ Returns:
389
+ int: The number of bytes required to serialize this message.
390
+ """
391
+ raise NotImplementedError
392
+
393
+ @classmethod
394
+ def FromString(cls, s):
395
+ raise NotImplementedError
396
+
397
+ def _SetListener(self, message_listener):
398
+ """Internal method used by the protocol message implementation.
399
+ Clients should not call this directly.
400
+
401
+ Sets a listener that this message will call on certain state transitions.
402
+
403
+ The purpose of this method is to register back-edges from children to
404
+ parents at runtime, for the purpose of setting "has" bits and
405
+ byte-size-dirty bits in the parent and ancestor objects whenever a child or
406
+ descendant object is modified.
407
+
408
+ If the client wants to disconnect this Message from the object tree, she
409
+ explicitly sets callback to None.
410
+
411
+ If message_listener is None, unregisters any existing listener. Otherwise,
412
+ message_listener must implement the MessageListener interface in
413
+ internal/message_listener.py, and we discard any listener registered
414
+ via a previous _SetListener() call.
415
+ """
416
+ raise NotImplementedError
417
+
418
+ def __getstate__(self):
419
+ """Support the pickle protocol."""
420
+ return dict(serialized=self.SerializePartialToString())
421
+
422
+ def __setstate__(self, state):
423
+ """Support the pickle protocol."""
424
+ self.__init__()
425
+ serialized = state['serialized']
426
+ # On Python 3, using encoding='latin1' is required for unpickling
427
+ # protos pickled by Python 2.
428
+ if not isinstance(serialized, bytes):
429
+ serialized = serialized.encode('latin1')
430
+ self.ParseFromString(serialized)
431
+
432
+ def __reduce__(self):
433
+ message_descriptor = self.DESCRIPTOR
434
+ if message_descriptor.containing_type is None:
435
+ return type(self), (), self.__getstate__()
436
+ # the message type must be nested.
437
+ # Python does not pickle nested classes; use the symbol_database on the
438
+ # receiving end.
439
+ container = message_descriptor
440
+ return (_InternalConstructMessage, (container.full_name,),
441
+ self.__getstate__())
442
+
443
+
444
+ def _InternalConstructMessage(full_name):
445
+ """Constructs a nested message."""
446
+ from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top
447
+
448
+ return symbol_database.Default().GetSymbol(full_name)()
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/message_factory.py ADDED
@@ -0,0 +1,190 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides a factory class for generating dynamic messages.
9
+
10
+ The easiest way to use this class is if you have access to the FileDescriptor
11
+ protos containing the messages you want to create you can just do the following:
12
+
13
+ message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
14
+ my_proto_instance = message_classes['some.proto.package.MessageName']()
15
+ """
16
+
17
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
18
+
19
+ import warnings
20
+
21
+ from google.protobuf import descriptor_pool
22
+ from google.protobuf import message
23
+ from google.protobuf.internal import api_implementation
24
+
25
+ if api_implementation.Type() == 'python':
26
+ from google.protobuf.internal import python_message as message_impl
27
+ else:
28
+ from google.protobuf.pyext import cpp_message as message_impl # pylint: disable=g-import-not-at-top
29
+
30
+
31
+ # The type of all Message classes.
32
+ _GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
33
+
34
+
35
def GetMessageClass(descriptor):
  """Returns the concrete proto2 message class for `descriptor`.

  Repeated invocations with descriptors sharing the same fully qualified
  name yield the same class object.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  existing = getattr(descriptor, '_concrete_class', None)
  if existing:
    return existing
  return _InternalCreateMessageClass(descriptor)
51
+
52
+
53
def GetMessageClassesForFiles(files, pool):
  """Builds message classes for every top-level message in the given files.

  Dependencies are found and resolved through `pool`; a failure to satisfy
  them raises.

  Classes for nested types are not included in the result; obtain those by
  calling GetMessageClass() on the nested types within their containing
  messages.

  For example, for the message:

    message NestedTypeMessage {
      message NestedType {
        string data = 1;
      }
      NestedType nested = 1;
    }

  NestedTypeMessage will be in the result, but not
  NestedTypeMessage.NestedType.

  Args:
    files: The file names to extract messages from.
    pool: The descriptor pool to find the files including the dependent files.

  Returns:
    A dictionary mapping proto names to the message classes.
  """
  result = {}
  for file_name in files:
    file_desc = pool.FindFileByName(file_name)
    for msg_desc in file_desc.message_types_by_name.values():
      result[msg_desc.full_name] = GetMessageClass(msg_desc)

    # The descriptor pool creates the extension FieldDescriptors, but the
    # python classes built by the factory still need them registered
    # explicitly, which happens below.
    #
    # RegisterExtension specifically checks whether the extension was
    # already registered on the object, ignoring an identical
    # re-registration and raising on a conflicting one.
    for ext in file_desc.extensions_by_name.values():
      _ = GetMessageClass(ext.containing_type)
      if api_implementation.Type() != 'python':
        # TODO: Remove this check here. Duplicate extension
        # register check should be in descriptor_pool.
        if ext is not pool.FindExtensionByNumber(
            ext.containing_type, ext.number
        ):
          raise ValueError('Double registration of Extensions')
      # Recursively load protos for the extension field so the extension
      # can be fully represented, mirroring the behavior of regular fields.
      if ext.message_type:
        GetMessageClass(ext.message_type)
  return result
112
+
113
+
114
def _InternalCreateMessageClass(descriptor):
  """Builds a fresh proto2 message class from `descriptor`.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  new_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
      descriptor.name,
      (message.Message,),
      {
          'DESCRIPTOR': descriptor,
          # If module not set, it wrongly points to message_factory module.
          '__module__': None,
      },
  )
  # Pre-create classes for all message-typed fields so the new class is
  # immediately usable.
  for field_desc in descriptor.fields:
    if field_desc.message_type:
      GetMessageClass(field_desc.message_type)

  for ext in new_class.DESCRIPTOR.extensions:
    GetMessageClass(ext.containing_type)
    if api_implementation.Type() != 'python':
      # TODO: Remove this check here. Duplicate extension
      # register check should be in descriptor_pool.
      pool = ext.containing_type.file.pool
      if ext is not pool.FindExtensionByNumber(
          ext.containing_type, ext.number
      ):
        raise ValueError('Double registration of Extensions')
    if ext.message_type:
      GetMessageClass(ext.message_type)
  return new_class
150
+
151
+
152
# Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
# method above instead.
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self, pool=None):
    """Initializes a new factory, creating a DescriptorPool when none given."""
    self.pool = pool or descriptor_pool.DescriptorPool()
160
+
161
+
162
def GetMessages(file_protos, pool=None):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: Iterable of FileDescriptorProto to build messages out of.
    pool: The descriptor pool to add the file protos.

  Returns:
    A dictionary mapping proto names to the message classes. This will include
    any dependent messages as well as any messages defined in the same file as
    a specified message.
  """
  # The C++ implementation of the protocol buffer library requires files to
  # be added in topological order of the dependency graph.
  target_pool = pool or descriptor_pool.DescriptorPool()
  pending = {fp.name: fp for fp in file_protos}

  def _AddWithDeps(file_proto):
    for dep_name in file_proto.dependency:
      if dep_name in pending:
        # Pop before recursing so dependency cycles terminate.
        _AddWithDeps(pending.pop(dep_name))
    target_pool.Add(file_proto)

  while pending:
    _AddWithDeps(pending.popitem()[1])
  return GetMessageClassesForFiles(
      [fp.name for fp in file_protos], target_pool
  )
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto.py ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Nextgen Pythonic protobuf APIs."""
9
+
10
import io
from typing import Optional, Text, Type, TypeVar

from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.message import Message
16
+
17
+ _MESSAGE = TypeVar('_MESSAGE', bound='Message')
18
+
19
+
20
def serialize(
    message: '_MESSAGE', deterministic: Optional[bool] = None
) -> bytes:
  """Returns the binary wire-format serialization of `message`.

  Args:
    message: The proto message to be serialized.
    deterministic: If true, requests deterministic serialization
      of the protobuf, with predictable ordering of map keys. If None,
      the library default is used.

  Returns:
    A binary bytes representation of the message.
  """
  # Fix: the parameter previously carried the annotation `bool = None`,
  # an implicit-Optional spelling deprecated by PEP 484.
  return message.SerializeToString(deterministic=deterministic)
32
+
33
+
34
def parse(message_class: Type['_MESSAGE'], payload: bytes) -> '_MESSAGE':
  """Deserializes binary wire-format data into a new message.

  Args:
    message_class: The message meta class.
    payload: A serialized bytes in binary form.

  Returns:
    A new message deserialized from payload.
  """
  result = message_class()
  result.ParseFromString(payload)
  return result
47
+
48
+
49
def serialize_length_prefixed(message: '_MESSAGE', output: io.BytesIO) -> None:
  """Writes the message size as a varint followed by the serialized message.

  Because the size precedes the payload, more data may be written to the
  output afterwards; use parse_length_prefixed to read messages written by
  this method.

  The output stream must be buffered, e.g. using
  https://docs.python.org/3/library/io.html#buffered-streams.

  Example usage:
    out = io.BytesIO()
    for msg in message_list:
      proto.serialize_length_prefixed(msg, out)

  Args:
    message: The protocol buffer message that should be serialized.
    output: BytesIO or custom buffered IO that data should be written to.
  """
  expected_size = message.ByteSize()
  encoder._VarintEncoder()(output.write, expected_size)
  written = output.write(serialize(message))

  # A short write indicates an unbuffered (or otherwise broken) stream.
  if written != expected_size:
    raise TypeError(
        'Failed to write complete message (wrote: %d, expected: %d)'
        '. Ensure output is using buffered IO.' % (written, expected_size)
    )
77
+
78
+
79
def parse_length_prefixed(
    message_class: Type['_MESSAGE'], input_bytes: io.BytesIO
) -> '_MESSAGE':
  """Reads one length-prefixed message from `input_bytes`.

  Args:
    message_class: The protocol buffer message class that parser should parse.
    input_bytes: A buffered input.

  Example usage:
    while True:
      msg = proto.parse_length_prefixed(message_class, input_bytes)
      if msg is None:
        break
      ...

  Returns:
    A parsed message if successful. None if input_bytes is at EOF.
  """
  msg_size = decoder._DecodeVarint(input_bytes)
  if msg_size is None:
    # End of the buffered input; see the usage example above.
    return None

  result = message_class()
  if msg_size == 0:
    return result

  consumed = result.ParseFromString(input_bytes.read(msg_size))
  if consumed != msg_size:
    raise ValueError(
        'Truncated message or non-buffered input_bytes: '
        'Expected {0} bytes but only {1} bytes parsed for '
        '{2}.'.format(msg_size, consumed, result.DESCRIPTOR.name)
    )
  return result
117
+
118
+
119
def byte_size(message: 'Message') -> int:
  """Returns the number of bytes `message` occupies when serialized.

  Args:
    message: A proto message.

  Returns:
    int: The number of bytes required to serialize this message.
  """
  return message.ByteSize()
129
+
130
+
131
def clear_message(message: 'Message') -> None:
  """Resets every field of `message` that was previously set.

  Args:
    message: The proto message to be cleared.
  """
  message.Clear()
138
+
139
+
140
def clear_field(message: 'Message', field_name: Text) -> None:
  """Clears the contents of a given field on `message`.

  Inside a oneof group, clears whichever field is currently set. If the
  name refers to neither a defined field nor a oneof group,
  :exc:`ValueError` is raised.

  Args:
    message: The proto message.
    field_name (str): The name of the field to be cleared.

  Raises:
    ValueError: if the `field_name` is not a member of this message.
  """
  message.ClearField(field_name)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_builder.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Dynamic Protobuf class creator."""
9
+
10
+ from collections import OrderedDict
11
+ import hashlib
12
+ import os
13
+
14
+ from google.protobuf import descriptor_pb2
15
+ from google.protobuf import descriptor
16
+ from google.protobuf import descriptor_pool
17
+ from google.protobuf import message_factory
18
+
19
+
20
def _GetMessageFromFactory(pool, full_name):
  """Looks up a proto class in the MessageFactory by fully qualified name.

  Args:
    pool: a descriptor pool.
    full_name: str, the fully qualified name of the proto type.
  Returns:
    A class, for the type identified by full_name.
  Raises:
    KeyError, if the proto is not found in the factory's descriptor pool.
  """
  found_descriptor = pool.FindMessageTypeByName(full_name)
  return message_factory.GetMessageClass(found_descriptor)
34
+
35
+
36
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
  """Creates a Protobuf class whose fields are basic types.

  Note: this doesn't validate field names!

  Args:
    fields: dict of {name: field_type} mappings for each field in the proto. If
        this is an OrderedDict the order will be maintained, otherwise the
        fields will be sorted by name.
    full_name: optional str, the fully-qualified name of the proto type.
    pool: optional DescriptorPool instance.
  Returns:
    a class, the new protobuf class with a FileDescriptor.
  """
  target_pool = pool or descriptor_pool.DescriptorPool()
  if full_name is not None:
    try:
      return _GetMessageFromFactory(target_pool, full_name)
    except KeyError:
      # The factory's DescriptorPool doesn't know about this class yet.
      pass

  # Build (name, field_type) tuples from the fields dict, preserving the
  # order of an OrderedDict and otherwise sorting by name so the layout
  # is deterministic.
  ordered_fields = fields.items()
  if not isinstance(fields, OrderedDict):
    ordered_fields = sorted(ordered_fields)

  # Derive a stable file name from the field layout; this is unlikely to
  # conflict with any imported proto file.
  digest = hashlib.sha1()
  for field_name, field_type in ordered_fields:
    digest.update(field_name.encode('utf-8'))
    digest.update(str(field_type).encode('utf-8'))
  proto_file_name = digest.hexdigest() + '.proto'

  # An anonymous proto gets named from the same hash.
  if full_name is None:
    full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
                 digest.hexdigest())
    try:
      return _GetMessageFromFactory(target_pool, full_name)
    except KeyError:
      # The factory's DescriptorPool doesn't know about this class yet.
      pass

  # First sighting of this proto: add a new descriptor to the pool.
  target_pool.Add(
      _MakeFileDescriptorProto(proto_file_name, full_name, ordered_fields))
  return _GetMessageFromFactory(target_pool, full_name)
89
+
90
+
91
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
  """Populates a FileDescriptorProto for MessageFactory's DescriptorPool."""
  package, name = full_name.rsplit('.', 1)
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
  file_proto.package = package
  desc_proto = file_proto.message_type.add()
  desc_proto.name = name
  for candidate_number, (field_name, field_type) in enumerate(field_items, 1):
    field_proto = desc_proto.field.add()
    field_proto.name = field_name
    # A candidate number inside the reserved range is shifted to the first
    # valid number past that range.
    if candidate_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER:
      candidate_number += (
          descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER -
          descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1)
    field_proto.number = candidate_number
    field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    field_proto.type = field_type
  return file_proto
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_json.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Nextgen Pythonic Protobuf JSON APIs."""
9
+
10
+ from typing import Optional, Type
11
+
12
+ from google.protobuf.message import Message
13
+ from google.protobuf.descriptor_pool import DescriptorPool
14
+ from google.protobuf import json_format
15
+
16
def serialize(
    message: Message,
    always_print_fields_with_no_presence: bool=False,
    preserving_proto_field_name: bool=False,
    use_integers_for_enums: bool=False,
    descriptor_pool: Optional[DescriptorPool]=None,
    float_precision: Optional[int]=None,
) -> dict:
  """Converts protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.

  Args:
    message: The protocol buffers message instance to serialize.
    always_print_fields_with_no_presence: If True, fields without
      presence (implicit presence scalars, repeated fields, and map fields) will
      always be serialized. Any field that supports presence is not affected by
      this option (including singular message fields and oneof fields).
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: If set, use this to specify float field valid digits.

  Returns:
    A dict representation of the protocol buffer message.
  """
  return json_format.MessageToDict(
      message,
      always_print_fields_with_no_presence=always_print_fields_with_no_presence,
      preserving_proto_field_name=preserving_proto_field_name,
      use_integers_for_enums=use_integers_for_enums,
      # Bug fix: descriptor_pool was accepted but previously never forwarded,
      # so callers supplying a custom pool silently got the default pool.
      descriptor_pool=descriptor_pool,
      float_precision=float_precision,
  )
52
+
53
def parse(
    message_class: Type[Message],
    js_dict: dict,
    ignore_unknown_fields: bool=False,
    descriptor_pool: Optional[DescriptorPool]=None,
    max_recursion_depth: int=100
) -> Message:
  """Builds a new message from a JSON dictionary representation.

  Args:
    message_class: The message meta class.
    js_dict: Dict representation of a JSON message.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be deserialized.
      JSON messages over this depth will fail to be deserialized. Default value
      is 100.

  Returns:
    A new message passed from json_dict.
  """
  result = message_class()
  json_format.ParseDict(
      js_dict=js_dict,
      message=result,
      ignore_unknown_fields=ignore_unknown_fields,
      descriptor_pool=descriptor_pool,
      max_recursion_depth=max_recursion_depth,
  )
  return result
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/proto_text.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2025 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Nextgen Pythonic Protobuf Text Format APIs."""
9
+ from typing import AnyStr, Callable, Optional, Text, Type, Union
10
+
11
+ from google.protobuf import text_format
12
+ from google.protobuf.descriptor_pool import DescriptorPool
13
+ from google.protobuf.message import Message
14
+
15
+ _MsgFormatter = Callable[[Message, Union[int, bool], bool], Optional[Text]]
16
+
17
+
18
def serialize(
    message: Message,
    as_utf8: bool = True,
    as_one_line: bool = False,
    use_short_repeated_primitives: bool = False,
    pointy_brackets: bool = False,
    use_index_order: bool = False,
    float_format: Optional[str] = None,
    double_format: Optional[str] = None,
    use_field_number: bool = False,
    descriptor_pool: Optional[DescriptorPool] = None,
    indent: int = 0,
    message_formatter: Optional[_MsgFormatter] = None,
    print_unknown_fields: bool = False,
    force_colon: bool = False,
) -> str:
  """Renders a protobuf message in text format.

  For compact double output use double_format='.15g' (the most that IEEE 754
  "double" can guarantee); to make text-to-proto round-trips lossless use
  double_format='.17g' instead.

  Args:
    message: The protocol buffers message.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, print fields in source-code declaration order
      instead of field-number order; extensions are printed at the end of the
      message, ordered relative to each other by extension number.
    float_format (str): If set, a "Format Specification Mini-Language" spec
      for float fields; otherwise the shortest float with the same wire value
      is printed. Also applies to double fields when double_format is unset.
    double_format (str): If set, a format spec for double fields; if unset,
      falls back to float_format, and otherwise to ``str()``.
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any
      types.
    indent (int): The initial indent level, in terms of spaces, for pretty
      print.
    message_formatter (function(message, indent, as_one_line) -> unicode|None):
      Custom formatter for selected sub-messages (usually based on message
      type). Use to pretty print parts of the protobuf for easier diffing.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if
      the field is a proto message.

  Returns:
    str: A string of the text formatted protocol buffer message.
  """
  # Every option is forwarded unchanged; this function is a thin pythonic
  # facade over text_format.MessageToString.
  options = dict(
      as_utf8=as_utf8,
      as_one_line=as_one_line,
      use_short_repeated_primitives=use_short_repeated_primitives,
      pointy_brackets=pointy_brackets,
      use_index_order=use_index_order,
      float_format=float_format,
      double_format=double_format,
      use_field_number=use_field_number,
      descriptor_pool=descriptor_pool,
      indent=indent,
      message_formatter=message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon,
  )
  return text_format.MessageToString(message, **options)
90
+
91
+
92
def parse(
    message_class: Type[Message],
    text: AnyStr,
    allow_unknown_extension: bool = False,
    allow_field_number: bool = False,
    descriptor_pool: Optional[DescriptorPool] = None,
    allow_unknown_field: bool = False,
) -> Message:
  """Parses a text representation of a protocol message into a new message.

  Args:
    message_class: The message meta class.
    text (str): Message text representation.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep parsing.
      Avoid to use this option if possible. It may hide some errors (e.g.
      spelling error on field name)

  Returns:
    Message: A new message passed from text.

  Raises:
    ParseError: On text parsing problems.
  """
  # Fix: the previous docstring documented a `message` argument to merge
  # into, but no such parameter exists — this function always constructs a
  # fresh instance of message_class and parses into it.
  new_message = message_class()
  text_format.Parse(
      text=text,
      message=new_message,
      allow_unknown_extension=allow_unknown_extension,
      allow_field_number=allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field,
  )
  return new_message
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/pyext/__init__.py ADDED
File without changes
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/pyext/cpp_message.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Protocol message implementation hooks for C++ implementation.
9
+
10
+ Contains helper functions used to create protocol message classes from
11
+ Descriptor objects at runtime backed by the protocol buffer C++ API.
12
+ """
13
+
14
+ __author__ = 'tibell@google.com (Johan Tibell)'
15
+
16
+ from google.protobuf.internal import api_implementation
17
+
18
+
19
+ # pylint: disable=protected-access
20
+ _message = api_implementation._c_module
21
+ # TODO: Remove this import after fix api_implementation
22
+ if _message is None:
23
+ from google.protobuf.pyext import _message
24
+
25
+
26
class GeneratedProtocolMessageType(_message.MessageMeta):

  """Metaclass for protocol message classes created at runtime from Descriptors.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime. Clients can also manually create their own
  classes at runtime, as in this example:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = message_factory.GetMessageClass(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto_instance.foo_field = 23
    ...

  The above example will not work for nested types. If you wish to include them,
  use reflection.MakeClass() instead of manually instantiating the class in
  order to create the appropriate class structure.
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  # Key under which the generated class dictionary carries its Descriptor.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/reflection.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # This code is meant to work on Python 2.4 and above only.
9
+
10
+ """Contains a metaclass and helper functions used to create
11
+ protocol message classes from Descriptor objects at runtime.
12
+
13
+ Recall that a metaclass is the "type" of a class.
14
+ (A class is to a metaclass what an instance is to a class.)
15
+
16
+ In this case, we use the GeneratedProtocolMessageType metaclass
17
+ to inject all the useful functionality into the classes
18
+ output by the protocol compiler at compile-time.
19
+
20
+ The upshot of all this is that the real implementation
21
+ details for ALL pure-Python protocol buffers are *here in
22
+ this file*.
23
+ """
24
+
25
+ __author__ = 'robinson@google.com (Will Robinson)'
26
+
27
+ import warnings
28
+
29
+ from google.protobuf import message_factory
30
+ from google.protobuf import symbol_database
31
+
32
# The type of all Message classes.
# Part of the public interface, but normally only used by message factories.
GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE

# NOTE(review): not referenced anywhere in this module's visible code —
# presumably a legacy cache kept for backward compatibility with external
# users of this public module attribute; confirm before removing.
MESSAGE_CLASS_CACHE = {}
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/runtime_version.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Protobuf Runtime versions and validators.
9
+
10
+ It should only be accessed by Protobuf gencodes and tests. DO NOT USE it
11
+ elsewhere.
12
+ """
13
+
14
+ __author__ = 'shaod@google.com (Dennis Shao)'
15
+
16
+ from enum import Enum
17
+ import os
18
+ import warnings
19
+
20
+
21
class Domain(Enum):
  """Release domain of a Protobuf gencode/runtime pair.

  ValidateProtobufRuntimeVersion rejects mixing gencode and runtime from
  different domains.
  """
  GOOGLE_INTERNAL = 1
  PUBLIC = 2
24
+
25
+
26
# The versions of this Python Protobuf runtime to be changed automatically by
# the Protobuf release process. Do not edit them manually.
# These OSS versions are not stripped to avoid merging conflicts.
OSS_DOMAIN = Domain.PUBLIC
OSS_MAJOR = 6
OSS_MINOR = 32
OSS_PATCH = 0
OSS_SUFFIX = ''

# The effective runtime version used by ValidateProtobufRuntimeVersion;
# in this build they simply alias the OSS values above.
DOMAIN = OSS_DOMAIN
MAJOR = OSS_MAJOR
MINOR = OSS_MINOR
PATCH = OSS_PATCH
SUFFIX = OSS_SUFFIX

# Avoid flooding of warnings.
# NOTE(review): _warning_count is declared `global` in
# ValidateProtobufRuntimeVersion but never incremented in this file's visible
# code — presumably used by a warning path elsewhere; confirm before removing.
_MAX_WARNING_COUNT = 20
_warning_count = 0
44
+
45
class VersionError(Exception):
  """Exception class for version violation.

  Raised when the gencode version is invalid or incompatible with this
  runtime (see ValidateProtobufRuntimeVersion).
  """
47
+
48
+
49
def _ReportVersionError(msg):
  """Raises VersionError with the given message.

  # NOTE(review): thin indirection around `raise` — presumably kept so the
  # error-reporting policy can be changed in one place; confirm before
  # inlining.
  """
  raise VersionError(msg)
51
+
52
+
53
def ValidateProtobufRuntimeVersion(
    gen_domain, gen_major, gen_minor, gen_patch, gen_suffix, location
):
  """Checks that generated-code versions are compatible with this runtime.

  Args:
    gen_domain: The domain where the code was generated from.
    gen_major: The major version number of the gencode.
    gen_minor: The minor version number of the gencode.
    gen_patch: The patch version number of the gencode.
    gen_suffix: The version suffix e.g. '-dev', '-rc1' of the gencode.
    location: The proto location that causes the version violation.

  Raises:
    VersionError: if gencode version is invalid or incompatible with the
      runtime.
  """

  # Escape hatch: the whole check can be turned off via the environment.
  if (
      os.getenv('TEMPORARILY_DISABLE_PROTOBUF_VERSION_CHECK') or ''
  ).lower() == 'true':
    return

  global _warning_count

  runtime_version = f'{MAJOR}.{MINOR}.{PATCH}{SUFFIX}'
  gencode_version = f'{gen_major}.{gen_minor}.{gen_patch}{gen_suffix}'

  # Any negative component means the gencode carries a nonsense version.
  if min(gen_major, gen_minor, gen_patch) < 0:
    raise VersionError(f'Invalid gencode version: {gencode_version}')

  error_prompt = (
      'See Protobuf version guarantees at'
      ' https://protobuf.dev/support/cross-version-runtime-guarantee.'
  )

  if gen_domain != DOMAIN:
    _ReportVersionError(
        'Detected mismatched Protobuf Gencode/Runtime domains when loading'
        f' {location}: gencode {gen_domain.name} runtime {DOMAIN.name}.'
        ' Cross-domain usage of Protobuf is not supported.'
    )

  # Lexicographic version comparison: the runtime must be at least as new as
  # the gencode it is loading.
  if (MAJOR, MINOR, PATCH) < (gen_major, gen_minor, gen_patch):
    _ReportVersionError(
        'Detected incompatible Protobuf Gencode/Runtime versions when loading'
        f' {location}: gencode {gencode_version} runtime'
        f' {runtime_version}. Runtime version cannot be older than the linked'
        f' gencode version. {error_prompt}'
    )
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/service_reflection.py ADDED
@@ -0,0 +1,272 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains metaclasses used to create protocol service and service stub
9
+ classes from ServiceDescriptor objects at runtime.
10
+
11
+ The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
12
+ inject all useful functionality into the classes output by the protocol
13
+ compiler at compile-time.
14
+ """
15
+
16
+ __author__ = 'petar@google.com (Petar Petrov)'
17
+
18
+
19
class GeneratedServiceType(type):

  """Metaclass that assembles service classes from ServiceDescriptors.

  Every method declared in the descriptor, plus the generic Service
  interface (CallMethod, GetDescriptor, GetRequestClass, GetResponseClass),
  is attached to the class at creation time.

  The protocol compiler uses this metaclass for its generated service
  classes; clients may also apply it manually, as in this example::

    mydescriptor = ServiceDescriptor(.....)
    class MyProtoService(service.Service):
      __metaclass__ = GeneratedServiceType
      DESCRIPTOR = mydescriptor
    myservice_instance = MyProtoService()
    # ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Fills in service behavior on a freshly created class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # Subclasses of a generated service class arrive here without their own
    # descriptor entry; leave those untouched.
    try:
      descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
    except KeyError:
      return

    _ServiceBuilder(descriptor).BuildService(cls)
    cls.DESCRIPTOR = descriptor
61
+
62
+
63
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass that assembles service *stub* classes from ServiceDescriptors.

  Shares the responsibilities of GeneratedServiceType, and additionally
  wires every descriptor-declared method to forward calls through an
  RpcChannel supplied at stub construction time.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Fills in stub behavior on a freshly created class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    super().__init__(name, bases, dictionary)
    # Subclasses of a generated stub class arrive here without their own
    # descriptor entry; nothing to generate for them.
    try:
      descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
    except KeyError:
      return

    _ServiceStubBuilder(descriptor).BuildServiceStub(cls)
92
+
93
+
94
class _ServiceBuilder(object):

  """This class constructs a protocol service class using a service descriptor.

  Given a service descriptor, this class constructs a class that represents
  the specified service descriptor. One service builder instance constructs
  exactly one service class. That means all instances of that class share the
  same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        service class.
    """
    self.descriptor = service_descriptor

  # NOTE: the "self" slot is deliberately named `builder` so the nested
  # wrapper functions below can use `self` for the *service instance* while
  # closing over the builder object.
  def BuildService(builder, cls):
    """Constructs the service class.

    Args:
      cls: The class that will be constructed.
    """

    # CallMethod needs to operate with an instance of the Service class. This
    # internal wrapper function exists only to be able to pass the service
    # instance to the method that does the real CallMethod work.
    # Making sure to use exact argument names from the abstract interface in
    # service.py to match the type signature
    def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done):
      return builder._CallMethod(self, method_descriptor, rpc_controller,
                                 request, done)

    def _WrapGetRequestClass(self, method_descriptor):
      return builder._GetRequestClass(method_descriptor)

    def _WrapGetResponseClass(self, method_descriptor):
      return builder._GetResponseClass(method_descriptor)

    builder.cls = cls
    cls.CallMethod = _WrapCallMethod
    cls.GetDescriptor = staticmethod(lambda: builder.descriptor)
    cls.GetDescriptor.__doc__ = 'Returns the service descriptor.'
    cls.GetRequestClass = _WrapGetRequestClass
    cls.GetResponseClass = _WrapGetResponseClass
    # Every descriptor-declared method starts as a not-implemented stub;
    # concrete services override these attributes by name.
    for method in builder.descriptor.methods:
      setattr(cls, method.name, builder._GenerateNonImplementedMethod(method))

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Calls the method described by a given method descriptor.

    Args:
      srvc: Instance of the service for which this method is called.
      method_descriptor: Descriptor that represent the method to call.
      rpc_controller: RPC controller to use for this method's execution.
      request: Request protocol message.
      callback: A callback to invoke after the method has completed.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    # Dispatch dynamically so subclass overrides of the service method win.
    method = getattr(srvc, method_descriptor.name)
    return method(rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Returns the class of the request protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        request protocol message class.

    Returns:
      A class that represents the input protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Returns the class of the response protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        response protocol message class.

    Returns:
      A class that represents the output protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Generates and returns a method that can be set for a service methods.

    Args:
      method: Descriptor of the service method for which a method is to be
        generated.

    Returns:
      A method that can be added to the service class.
    """
    # `method` is a parameter of this function, so each generated lambda is
    # bound to its own descriptor (no late-binding closure pitfall).
    return lambda inst, rpc_controller, request, callback: (
        self._NonImplementedMethod(method.name, rpc_controller, callback))

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """The body of all methods in the generated service class.

    Args:
      method_name: Name of the method being executed.
      rpc_controller: RPC controller used to execute this method.
      callback: A callback which will be invoked when the method finishes.
    """
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)
216
+
217
+
218
class _ServiceStubBuilder(object):

  """Constructs a protocol service stub class using a service descriptor.

  Given a service descriptor, this class constructs a suitable stub class.
  A stub is just a type-safe wrapper around an RpcChannel which emulates a
  local implementation of the service.

  One service stub builder instance constructs exactly one class. It means all
  instances of that class share the same service stub builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service stub class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        stub class.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Constructs the stub class.

    Args:
      cls: The class that will be constructed.
    """

    # Stub instances only carry the channel they forward calls over.
    def _ServiceStubInit(stub, rpc_channel):
      stub.rpc_channel = rpc_channel
    self.cls = cls
    cls.__init__ = _ServiceStubInit
    # One forwarding method per descriptor-declared service method.
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateStubMethod(method))

  def _GenerateStubMethod(self, method):
    # `method` is a parameter here, so each lambda captures its own
    # descriptor (no late-binding closure pitfall). `callback` defaults to
    # None, which selects a blocking call in _StubMethod's channel call.
    return (lambda inst, rpc_controller, request, callback=None:
            self._StubMethod(inst, method, rpc_controller, request, callback))

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """The body of all service methods in the generated stub class.

    Args:
      stub: Stub instance.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: Rpc controller to execute the method.
      request: Request protocol message.
      callback: A callback to execute when the method finishes.
    Returns:
      Response message (in case of blocking call).
    """
    return stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request,
        method_descriptor.output_type._concrete_class, callback)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/source_context_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/source_context.proto
# Protobuf Python Version: 6.32.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fail fast if this gencode is newer than the installed runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    32,
    0,
    '',
    'google/protobuf/source_context.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register the serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\",\n\rSourceContext\x12\x1b\n\tfile_name\x18\x01 \x01(\tR\x08\x66ileNameB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypes\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', _globals)
# Only the pure-Python descriptor implementation consumes these serialized
# options and per-message start/end indices into the serialized file.
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_SOURCECONTEXT']._serialized_start=57
  _globals['_SOURCECONTEXT']._serialized_end=101
# @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/struct_pb2.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/struct.proto
# Protobuf Python Version: 6.32.0
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fail fast if this gencode is newer than the installed runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    6,
    32,
    0,
    '',
    'google/protobuf/struct.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register the serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x98\x01\n\x06Struct\x12;\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntryR\x06\x66ields\x1aQ\n\x0b\x46ieldsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\"\xb2\x02\n\x05Value\x12;\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00R\tnullValue\x12#\n\x0cnumber_value\x18\x02 \x01(\x01H\x00R\x0bnumberValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1f\n\nbool_value\x18\x04 \x01(\x08H\x00R\tboolValue\x12<\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x0bstructValue\x12;\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00R\tlistValueB\x06\n\x04kind\";\n\tListValue\x12.\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.ValueR\x06values*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', _globals)
# Only the pure-Python descriptor implementation consumes these serialized
# options and per-message start/end indices into the serialized file.
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_STRUCT_FIELDSENTRY']._loaded_options = None
  _globals['_STRUCT_FIELDSENTRY']._serialized_options = b'8\001'
  _globals['_NULLVALUE']._serialized_start=574
  _globals['_NULLVALUE']._serialized_end=601
  _globals['_STRUCT']._serialized_start=50
  _globals['_STRUCT']._serialized_end=202
  _globals['_STRUCT_FIELDSENTRY']._serialized_start=121
  _globals['_STRUCT_FIELDSENTRY']._serialized_end=202
  _globals['_VALUE']._serialized_start=205
  _globals['_VALUE']._serialized_end=511
  _globals['_LISTVALUE']._serialized_start=513
  _globals['_LISTVALUE']._serialized_end=572
# @@protoc_insertion_point(module_scope)
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/symbol_database.py ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A database of Python protocol buffer generated symbols.
9
+
10
+ SymbolDatabase is the MessageFactory for messages generated at compile time,
11
+ and makes it easy to create new instances of a registered type, given only the
12
+ type's protocol buffer symbol name.
13
+
14
+ Example usage::
15
+
16
+ db = symbol_database.SymbolDatabase()
17
+
18
+ # Register symbols of interest, from one or multiple files.
19
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
20
+ db.RegisterMessage(my_proto_pb2.MyMessage)
21
+ db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
22
+
23
+ # The database can be used as a MessageFactory, to generate types based on
24
+ # their name:
25
+ types = db.GetMessages(['my_proto.proto'])
26
+ my_message_instance = types['MyMessage']()
27
+
28
+ # The database's underlying descriptor pool can be queried, so it's not
29
+ # necessary to know a type's filename to be able to generate it:
30
+ filename = db.pool.FindFileContainingSymbol('MyMessage')
31
+ my_message_instance = db.GetMessages([filename])['MyMessage']()
32
+
33
+ # This functionality is also provided directly via a convenience method:
34
+ my_message_instance = db.GetSymbol('MyMessage')()
35
+ """
36
+
37
+ import warnings
38
+
39
+ from google.protobuf.internal import api_implementation
40
+ from google.protobuf import descriptor_pool
41
+ from google.protobuf import message_factory
42
+
43
+
44
class SymbolDatabase():
  """A database of Python generated protocol buffer symbols."""

  # Cache mapping message Descriptor -> generated message class.
  # NOTE(review): this is a class-level attribute, so the cache is shared by
  # every SymbolDatabase instance; kept as-is to preserve behavior.
  _classes = {}

  def __init__(self, pool=None):
    """Initializes a new SymbolDatabase.

    Args:
      pool: Optional DescriptorPool backing this database; a fresh pool is
        created when omitted.
    """
    self.pool = pool or descriptor_pool.DescriptorPool()

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Calls to GetSymbol() and GetMessages() will return messages registered
    here.

    Args:
      message: A :class:`google.protobuf.message.Message` subclass (or
        instance); its descriptor will be registered.

    Returns:
      The provided message.
    """
    message_descriptor = message.DESCRIPTOR
    self._classes[message_descriptor] = message
    self.RegisterMessageDescriptor(message_descriptor)
    return message

  def RegisterMessageDescriptor(self, message_descriptor):
    """Registers the given message descriptor in the local database.

    Args:
      message_descriptor (Descriptor): the message descriptor to add.
    """
    # Only the pure-Python descriptor pool exposes this registration hook.
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddDescriptor(message_descriptor)

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor (EnumDescriptor): The enum descriptor to register.

    Returns:
      EnumDescriptor: The provided descriptor.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterServiceDescriptor(self, service_descriptor):
    """Registers the given service descriptor in the local database.

    Args:
      service_descriptor (ServiceDescriptor): the service descriptor to
        register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddServiceDescriptor(service_descriptor)

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor (FileDescriptor): The file descriptor to register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._InternalAddFileDescriptor(file_descriptor)

  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances, however,
    it may be extended in future to support other symbol types.

    Args:
      symbol (str): a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """
    return self._classes[self.pool.FindMessageTypeByName(symbol)]

  def GetMessages(self, files):
    # TODO: Fix the differences with MessageFactory.
    """Gets all registered messages from a specified file.

    Only messages already created and registered will be returned; (this is
    the case for imported _pb2 modules)
    But unlike MessageFactory, this version also returns already defined
    nested messages, but does not register any message extensions.

    Args:
      files (list[str]): The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes.

    Raises:
      KeyError: if a file could not be found.
    """

    def _WalkMessages(message_desc):
      """Yields *message_desc* and, recursively, all of its nested types."""
      yield message_desc
      for nested_desc in message_desc.nested_types:
        yield from _WalkMessages(nested_desc)

    result = {}
    for file_name in files:
      file_desc = self.pool.FindFileByName(file_name)
      for top_level_desc in file_desc.message_types_by_name.values():
        for message_desc in _WalkMessages(top_level_desc):
          registered = self._classes.get(message_desc)
          if registered is not None:
            # Descriptors without a registered class are skipped.
            result[message_desc.full_name] = registered
    return result
172
+
173
+
174
# Singleton database returned by Default(), backed by the process-wide
# default descriptor pool.
_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/testdata/__init__.py ADDED
File without changes
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/text_encoding.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Encoding related utilities."""
9
+ import re
10
+
11
def _AsciiIsPrint(i):
  """True iff code point *i* is printable ASCII (0x20..0x7E inclusive)."""
  return 32 <= i < 127


def _MakeStrEscapes():
  """Builds the int -> replacement-string table used by str.translate.

  Non-printable ASCII code points map to 3-digit octal escapes; a handful of
  printable characters get their conventional C escapes instead.
  """
  escapes = {code: r'\%03o' % code
             for code in range(128) if not _AsciiIsPrint(code)}
  escapes[ord('\t')] = r'\t'   # optional escape
  escapes[ord('\n')] = r'\n'   # optional escape
  escapes[ord('\r')] = r'\r'   # optional escape
  escapes[ord('"')] = r'\"'    # necessary escape
  escapes[ord('\'')] = r"\'"   # optional escape
  escapes[ord('\\')] = r'\\'   # necessary escape
  return escapes


# Maps int -> char, performing string escapes.
_str_escapes = _MakeStrEscapes()

# Maps int -> char, performing byte escaping and string escapes: every byte
# >= 128 is octal-escaped, ASCII follows _str_escapes, the rest pass through.
_byte_escapes = {i: chr(i) for i in range(256)}
_byte_escapes.update(_str_escapes)
_byte_escapes.update({i: r'\%03o' % i for i in range(128, 256)})


def _DecodeUtf8EscapeErrors(text_bytes):
  """Decodes *text_bytes* as UTF-8, octal-escaping each invalid byte."""
  pieces = []
  remaining = text_bytes
  while remaining:
    try:
      pieces.append(remaining.decode('utf-8').translate(_str_escapes))
      break
    except UnicodeDecodeError as err:
      # Escape the prefix that did decode, then the single offending byte,
      # and continue after it.
      pieces.append(remaining[:err.start].decode('utf-8')
                    .translate(_str_escapes))
      pieces.append(_byte_escapes[remaining[err.start]])
      remaining = remaining[err.start + 1:]
  return ''.join(pieces)


def CEscape(text, as_utf8) -> str:
  """Escape a bytes string for use in an text protocol buffer.

  Args:
    text: A byte string to be escaped.
    as_utf8: Specifies if result may contain non-ASCII characters.
      In Python 3 this allows unescaped non-ASCII Unicode characters.
      In Python 2 the return value will be valid UTF-8 rather than only ASCII.
  Returns:
    Escaped string (str).
  """
  # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
  # satisfy our needs; they encode unprintable characters using two-digit hex
  # escapes whereas our C++ unescaping function allows hex escapes to be any
  # length. So, "\0011".encode('string_escape') ends up being "\\x011", which
  # will be decoded in C++ as a single-character string with char code 0x11.
  if isinstance(text, str):
    if as_utf8:
      return text.translate(_str_escapes)
    # Escape byte-by-byte after encoding to UTF-8.
    text = text.encode('utf-8')
  elif as_utf8:
    return _DecodeUtf8EscapeErrors(text)
  return ''.join(_byte_escapes[byte] for byte in text)
75
+
76
+
77
+ _CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
78
+
79
+
80
+ def CUnescape(text: str) -> bytes:
81
+ """Unescape a text string with C-style escape sequences to UTF-8 bytes.
82
+
83
+ Args:
84
+ text: The data to parse in a str.
85
+ Returns:
86
+ A byte string.
87
+ """
88
+
89
+ def ReplaceHex(m):
90
+ # Only replace the match if the number of leading back slashes is odd. i.e.
91
+ # the slash itself is not escaped.
92
+ if len(m.group(1)) & 1:
93
+ return m.group(1) + 'x0' + m.group(2)
94
+ return m.group(0)
95
+
96
+ # This is required because the 'string_escape' encoding doesn't
97
+ # allow single-digit hex escapes (like '\xf').
98
+ result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
99
+
100
+ # Replaces Unicode escape sequences with their character equivalents.
101
+ result = result.encode('raw_unicode_escape').decode('raw_unicode_escape')
102
+ # Encode Unicode characters as UTF-8, then decode to Latin-1 escaping
103
+ # unprintable characters.
104
+ result = result.encode('utf-8').decode('unicode_escape')
105
+ # Convert Latin-1 text back to a byte string (latin-1 codec also works here).
106
+ return result.encode('latin-1')
tool_server/.venv/lib/python3.12/site-packages/google/protobuf/text_format.py ADDED
@@ -0,0 +1,1884 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains routines for printing protocol messages in text format.
9
+
10
+ Simple usage example::
11
+
12
+ # Create a proto object and serialize it to a text proto string.
13
+ message = my_proto_pb2.MyMessage(foo='bar')
14
+ text_proto = text_format.MessageToString(message)
15
+
16
+ # Parse a text proto string.
17
+ message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
18
+ """
19
+
20
+ __author__ = 'kenton@google.com (Kenton Varda)'
21
+
22
+ # TODO Import thread contention leads to test failures.
23
+ import encodings.raw_unicode_escape # pylint: disable=unused-import
24
+ import encodings.unicode_escape # pylint: disable=unused-import
25
+ import io
26
+ import math
27
+ import re
28
+ import warnings
29
+
30
+ from google.protobuf.internal import decoder
31
+ from google.protobuf.internal import type_checkers
32
+ from google.protobuf import descriptor
33
+ from google.protobuf import text_encoding
34
+ from google.protobuf import unknown_fields
35
+
36
# pylint: disable=g-import-not-at-top
# Public API of this module.
__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
           'PrintFieldValue', 'Merge', 'MessageToBytes']

# Value checkers used when validating/converting integer scalar fields.
_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
                     type_checkers.Int32ValueChecker(),
                     type_checkers.Uint64ValueChecker(),
                     type_checkers.Int64ValueChecker())
# Textual float tokens for +/-infinity and NaN (optional trailing 'f').
_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
# Matches a (possibly negative) integer literal with a leading zero.
_FLOAT_OCTAL_PREFIX = re.compile('-?0[0-9]+')
# Characters that may delimit a quoted string token.
_QUOTES = frozenset(("'", '"'))
_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
# NOTE(review): marker sequence ('\t ') related to DebugString output; its
# exact role is handled elsewhere in this module -- confirm against parser.
_DEBUG_STRING_SILENT_MARKER = '\t '

# Default for the as_utf8 parameter used throughout this module.
_as_utf8_default = True
52
+
53
+
54
class Error(Exception):
  """Top-level module error for text_format.

  Base class for text_format exceptions such as ParseError.
  """
56
+
57
+
58
class ParseError(Error):
  """Thrown in case of text parsing or tokenizing error.

  When both a message and a line are supplied, the message is prefixed with
  a 'line[:column] : ' location string.
  """

  def __init__(self, message=None, line=None, column=None):
    if message is not None and line is not None:
      location = str(line)
      if column is not None:
        location = '{0}:{1}'.format(location, column)
      message = '{0} : {1}'.format(location, message)
    # Forward the (possibly location-prefixed) message to Exception.
    args = () if message is None else (message,)
    super(ParseError, self).__init__(*args)
    self._line = line
    self._column = column

  def GetLine(self):
    """Returns the 1-based line passed at construction, or None."""
    return self._line

  def GetColumn(self):
    """Returns the column passed at construction, or None."""
    return self._column
79
+
80
+
81
class TextWriter(object):
  """Minimal text sink wrapping an in-memory io.StringIO buffer.

  The ``as_utf8`` constructor argument is accepted for interface
  compatibility but is unused: output is always accumulated as ``str``.
  """

  def __init__(self, as_utf8):
    del as_utf8  # Unused; kept for backward-compatible signature.
    self._writer = io.StringIO()

  def write(self, val):
    """Appends *val* to the buffer; returns the number of characters written."""
    return self._writer.write(val)

  def close(self):
    """Closes the underlying buffer."""
    return self._writer.close()

  def getvalue(self):
    """Returns everything written so far as a single string."""
    return self._writer.getvalue()
94
+
95
+
96
def MessageToString(
    message,
    as_utf8=_as_utf8_default,
    as_one_line=False,
    use_short_repeated_primitives=False,
    pointy_brackets=False,
    use_index_order=False,
    float_format=None,
    double_format=None,
    use_field_number=False,
    descriptor_pool=None,
    indent=0,
    message_formatter=None,
    print_unknown_fields=False,
    force_colon=False) -> str:
  """Convert protobuf message to text format.

  Double values can be formatted compactly with 15 digits of precision
  (the most that IEEE 754 "double" can guarantee) using double_format='.15g'.
  To ensure that converting to text and back to a proto will result in an
  identical value, double_format='.17g' should be used.

  Args:
    message: The protocol buffers message.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, fields of a proto message will be printed using
      the order defined in source code instead of the field number, extensions
      will be printed at the end of the message and their relative order is
      determined by the extension number. By default, use the field number
      order.
    float_format (str): Deprecated. If set, use this to specify float field
      formatting (per the "Format Specification Mini-Language"); otherwise,
      shortest float that has same value in wire will be printed. Also affect
      double field if double_format is not set but float_format is set.
    double_format (str): Deprecated. If set, use this to specify double field
      formatting (per the "Format Specification Mini-Language"); if it is not
      set but float_format is set, use float_format. Otherwise, use ``str()``
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any
      types.
    indent (int): The initial indent level, in terms of spaces, for pretty
      print.
    message_formatter (function(message, indent, as_one_line) -> unicode|None):
      Custom formatter for selected sub-messages (usually based on message
      type). Use to pretty print parts of the protobuf for easier diffing.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if
      the field is a proto message.

  Returns:
    str: A string of the text formatted protocol buffer message.
  """
  writer = TextWriter(as_utf8)
  printer = _Printer(
      writer,
      indent,
      as_utf8,
      as_one_line,
      use_short_repeated_primitives,
      pointy_brackets,
      use_index_order,
      float_format,
      double_format,
      use_field_number,
      descriptor_pool,
      message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  printer.PrintMessage(message)
  text = writer.getvalue()
  writer.close()
  # One-line output carries a trailing separator space; trim it.
  return text.rstrip() if as_one_line else text
173
+
174
+
175
def MessageToBytes(message, **kwargs) -> bytes:
  """Convert protobuf message to encoded text format. See MessageToString."""
  text = MessageToString(message, **kwargs)
  if isinstance(text, bytes):
    # Already encoded; pass through unchanged.
    return text
  encoding = 'utf-8' if kwargs.get('as_utf8') else 'ascii'
  return text.encode(encoding)
182
+
183
+
184
def _IsMapEntry(field):
  """True iff *field* holds the synthetic entry message backing a map."""
  if field.type != descriptor.FieldDescriptor.TYPE_MESSAGE:
    return False
  entry_type = field.message_type
  return entry_type.has_options and entry_type.GetOptions().map_entry
188
+
189
+
190
def _IsGroupLike(field):
  """Determines if a field is consistent with a proto2 group.

  Args:
    field: The field descriptor.

  Returns:
    True if this field is group-like, false otherwise.
  """
  # Groups are always tag-delimited.
  if field.type != descriptor.FieldDescriptor.TYPE_GROUP:
    return False

  # Group field names are always the lowercased message type name.
  if field.name != field.message_type.name.lower():
    return False

  # Both must come from the same file; file level extensions would otherwise
  # compare NULL == NULL in the scope check below.
  if field.message_type.file != field.file:
    return False

  # Group messages are always defined in the same scope as the field.
  if field.is_extension:
    return field.message_type.containing_type == field.extension_scope
  return field.message_type.containing_type == field.containing_type
218
+
219
+
220
def PrintMessage(message,
                 out,
                 indent=0,
                 as_utf8=_as_utf8_default,
                 as_one_line=False,
                 use_short_repeated_primitives=False,
                 pointy_brackets=False,
                 use_index_order=False,
                 float_format=None,
                 double_format=None,
                 use_field_number=False,
                 descriptor_pool=None,
                 message_formatter=None,
                 print_unknown_fields=False,
                 force_colon=False):
  """Convert the message to text format and write it to the out stream.

  Args:
    message: The Message object to convert to text format.
    out: A file handle to write the message to.
    indent: The initial indent level for pretty print.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, print fields of a proto message using the order
      defined in source code instead of the field number. By default, use the
      field number order.
    float_format: If set, use this to specify float field formatting
      (per the "Format Specification Mini-Language"); otherwise, shortest
      float that has same value in wire will be printed. Also affect double
      field if double_format is not set but float_format is set.
    double_format: If set, use this to specify double field formatting
      (per the "Format Specification Mini-Language"); if it is not set but
      float_format is set, use float_format. Otherwise, str() is used.
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    message_formatter: A function(message, indent, as_one_line): unicode|None
      to custom format selected sub-messages (usually based on message type).
      Use to pretty print parts of the protobuf for easier diffing.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if
      the field is a proto message.
  """
  # Collect all printer options, then delegate the actual work to _Printer.
  printer_options = dict(
      out=out,
      indent=indent,
      as_utf8=as_utf8,
      as_one_line=as_one_line,
      use_short_repeated_primitives=use_short_repeated_primitives,
      pointy_brackets=pointy_brackets,
      use_index_order=use_index_order,
      float_format=float_format,
      double_format=double_format,
      use_field_number=use_field_number,
      descriptor_pool=descriptor_pool,
      message_formatter=message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  _Printer(**printer_options).PrintMessage(message)
279
+
280
+
281
def PrintField(field,
               value,
               out,
               indent=0,
               as_utf8=_as_utf8_default,
               as_one_line=False,
               use_short_repeated_primitives=False,
               pointy_brackets=False,
               use_index_order=False,
               float_format=None,
               double_format=None,
               message_formatter=None,
               print_unknown_fields=False,
               force_colon=False):
  """Print a single field name/value pair to *out*."""
  # Build a one-shot printer and delegate.
  _Printer(out, indent, as_utf8, as_one_line,
           use_short_repeated_primitives, pointy_brackets,
           use_index_order, float_format, double_format,
           message_formatter=message_formatter,
           print_unknown_fields=print_unknown_fields,
           force_colon=force_colon).PrintField(field, value)
303
+
304
+
305
def PrintFieldValue(field,
                    value,
                    out,
                    indent=0,
                    as_utf8=_as_utf8_default,
                    as_one_line=False,
                    use_short_repeated_primitives=False,
                    pointy_brackets=False,
                    use_index_order=False,
                    float_format=None,
                    double_format=None,
                    message_formatter=None,
                    print_unknown_fields=False,
                    force_colon=False):
  """Print a single field value (not including name) to *out*."""
  # Build a one-shot printer and delegate.
  _Printer(out, indent, as_utf8, as_one_line,
           use_short_repeated_primitives, pointy_brackets,
           use_index_order, float_format, double_format,
           message_formatter=message_formatter,
           print_unknown_fields=print_unknown_fields,
           force_colon=force_colon).PrintFieldValue(field, value)
327
+
328
+
329
def _BuildMessageFromTypeName(type_name, descriptor_pool):
  """Returns a protobuf message instance.

  Args:
    type_name: Fully-qualified protobuf message type name string.
    descriptor_pool: DescriptorPool instance, or None for the default pool.

  Returns:
    A Message instance of type matching type_name, or None if no Descriptor
    matching type_name was found.
  """
  # pylint: disable=g-import-not-at-top
  if descriptor_pool is None:
    from google.protobuf import descriptor_pool as pool_mod
    descriptor_pool = pool_mod.Default()
  from google.protobuf import message_factory
  try:
    message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
  except KeyError:
    # Unknown type: signal to the caller rather than raising.
    return None
  return message_factory.GetMessageClass(message_descriptor)()
351
+
352
+
353
# These values must match WireType enum in //google/protobuf/wire_format.h.
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
356
+
357
+
358
class _Printer(object):
  """Text format printer for protocol message."""

  def __init__(
      self,
      out,
      indent=0,
      as_utf8=_as_utf8_default,
      as_one_line=False,
      use_short_repeated_primitives=False,
      pointy_brackets=False,
      use_index_order=False,
      float_format=None,
      double_format=None,
      use_field_number=False,
      descriptor_pool=None,
      message_formatter=None,
      print_unknown_fields=False,
      force_colon=False):
    """Initialize the Printer.

    Double values can be formatted compactly with 15 digits of precision
    (which is the most that IEEE 754 "double" can guarantee) using
    double_format='.15g'. To ensure that converting to text and back to a
    proto will result in an identical value, double_format='.17g' should be
    used.

    Args:
      out: To record the text format result.
      indent: The initial indent level for pretty print.
      as_utf8: Return unescaped Unicode for non-ASCII characters.
      as_one_line: Don't introduce newlines between fields.
      use_short_repeated_primitives: Use short repeated format for primitives.
      pointy_brackets: If True, use angle brackets instead of curly braces for
        nesting.
      use_index_order: If True, print fields of a proto message using the
        order defined in source code instead of the field number. By default,
        use the field number order.
      float_format: Deprecated. If set, use this to specify float field
        formatting (per the "Format Specification Mini-Language"); otherwise,
        shortest float that has same value in wire will be printed. Also
        affect double field if double_format is not set but float_format is
        set.
      double_format: Deprecated. If set, use this to specify double field
        formatting (per the "Format Specification Mini-Language"); if it is
        not set but float_format is set, use float_format. Otherwise, str()
        is used.
      use_field_number: If True, print field numbers instead of names.
      descriptor_pool: A DescriptorPool used to resolve Any types.
      message_formatter: A function(message, indent, as_one_line):
        unicode|None to custom format selected sub-messages (usually based on
        message type). Use to pretty print parts of the protobuf for easier
        diffing.
      print_unknown_fields: If True, unknown fields will be printed.
      force_colon: If set, a colon will be added after the field name even if
        the field is a proto message.
    """
    self.out = out
    self.indent = indent
    self.as_utf8 = as_utf8
    self.as_one_line = as_one_line
    self.use_short_repeated_primitives = use_short_repeated_primitives
    self.pointy_brackets = pointy_brackets
    self.use_index_order = use_index_order
    self.float_format = float_format
    # An explicit double_format is deprecated; warn but honor it.
    if double_format is not None:
      warnings.warn(
          'double_format is deprecated for text_format. This will '
          'turn into error in 7.34.0, please remove it before that.'
      )
      self.double_format = double_format
    else:
      # Doubles fall back to float_format (which may itself be None).
      self.double_format = float_format
    self.use_field_number = use_field_number
    self.descriptor_pool = descriptor_pool
    self.message_formatter = message_formatter
    self.print_unknown_fields = print_unknown_fields
    self.force_colon = force_colon
432
+
433
+ def _TryPrintAsAnyMessage(self, message):
434
+ """Serializes if message is a google.protobuf.Any field."""
435
+ if '/' not in message.type_url:
436
+ return False
437
+ packed_message = _BuildMessageFromTypeName(message.TypeName(),
438
+ self.descriptor_pool)
439
+ if packed_message is not None:
440
+ packed_message.MergeFromString(message.value)
441
+ colon = ':' if self.force_colon else ''
442
+ self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon))
443
+ self._PrintMessageFieldValue(packed_message)
444
+ self.out.write(' ' if self.as_one_line else '\n')
445
+ return True
446
+ else:
447
+ return False
448
+
449
+ def _TryCustomFormatMessage(self, message):
450
+ formatted = self.message_formatter(message, self.indent, self.as_one_line)
451
+ if formatted is None:
452
+ return False
453
+
454
+ out = self.out
455
+ out.write(' ' * self.indent)
456
+ out.write(formatted)
457
+ out.write(' ' if self.as_one_line else '\n')
458
+ return True
459
+
460
  def PrintMessage(self, message):
    """Convert protobuf message to text format.

    Precedence: a user-supplied message formatter is tried first, then the
    expanded google.protobuf.Any form, and finally regular field-by-field
    printing.

    Args:
      message: The protocol buffers message.
    """
    if self.message_formatter and self._TryCustomFormatMessage(message):
      return
    if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
        self._TryPrintAsAnyMessage(message)):
      return
    fields = message.ListFields()
    if self.use_index_order:
      # Extensions sort by field number; regular fields by declaration index.
      fields.sort(
          key=lambda x: x[0].number if x[0].is_extension else x[0].index)
    for field, value in fields:
      if _IsMapEntry(field):
        # Map entries are printed in sorted key order for determinism.
        for key in sorted(value):
          # This is slow for maps with submessage entries because it copies the
          # entire tree. Unfortunately this would take significant refactoring
          # of this file to work around.
          #
          # TODO: refactor and optimize if this becomes an issue.
          entry_submsg = value.GetEntryClass()(key=key, value=value[key])
          self.PrintField(field, entry_submsg)
      elif field.is_repeated:
        # Short form "[a, b, c]" applies only to non-message, non-string
        # primitives, and only when explicitly enabled.
        if (self.use_short_repeated_primitives
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING):
          self._PrintShortRepeatedPrimitivesValue(field, value)
        else:
          for element in value:
            self.PrintField(field, element)
      else:
        self.PrintField(field, value)

    if self.print_unknown_fields:
      self._PrintUnknownFields(unknown_fields.UnknownFieldSet(message))
498
+
499
  def _PrintUnknownFields(self, unknown_field_set):
    """Print unknown fields.

    Groups recurse directly; length-delimited payloads are speculatively
    decoded as embedded messages and printed as groups when the whole
    payload parses, otherwise escaped as a string/bytes value. Recursion
    temporarily widens ``self.indent`` in multi-line mode.
    """
    out = self.out
    for field in unknown_field_set:
      out.write(' ' * self.indent)
      out.write(str(field.field_number))
      if field.wire_type == WIRETYPE_START_GROUP:
        if self.as_one_line:
          out.write(' { ')
        else:
          out.write(' {\n')
          self.indent += 2

        self._PrintUnknownFields(field.data)

        if self.as_one_line:
          out.write('} ')
        else:
          self.indent -= 2
          out.write(' ' * self.indent + '}\n')
      elif field.wire_type == WIRETYPE_LENGTH_DELIMITED:
        try:
          # If this field is parseable as a Message, it is probably
          # an embedded message.
          # pylint: disable=protected-access
          (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet(
              memoryview(field.data), 0, len(field.data))
        except Exception:  # pylint: disable=broad-except
          pos = 0

        # Only treat as an embedded message if the entire payload parsed.
        if pos == len(field.data):
          if self.as_one_line:
            out.write(' { ')
          else:
            out.write(' {\n')
            self.indent += 2

          self._PrintUnknownFields(embedded_unknown_message)

          if self.as_one_line:
            out.write('} ')
          else:
            self.indent -= 2
            out.write(' ' * self.indent + '}\n')
        else:
          # A string or bytes field. self.as_utf8 may not work.
          out.write(': \"')
          out.write(text_encoding.CEscape(field.data, False))
          out.write('\" ' if self.as_one_line else '\"\n')
      else:
        # varint, fixed32, fixed64
        out.write(': ')
        out.write(str(field.data))
        out.write(' ' if self.as_one_line else '\n')
553
+
554
+ def _PrintFieldName(self, field):
555
+ """Print field name."""
556
+ out = self.out
557
+ out.write(' ' * self.indent)
558
+ if self.use_field_number:
559
+ out.write(str(field.number))
560
+ else:
561
+ if field.is_extension:
562
+ out.write('[')
563
+ if (field.containing_type.GetOptions().message_set_wire_format and
564
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
565
+ not field.is_required and
566
+ not field.is_repeated):
567
+ out.write(field.message_type.full_name)
568
+ else:
569
+ out.write(field.full_name)
570
+ out.write(']')
571
+ elif _IsGroupLike(field):
572
+ # For groups, use the capitalized name.
573
+ out.write(field.message_type.name)
574
+ else:
575
+ out.write(field.name)
576
+
577
+ if (self.force_colon or
578
+ field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE):
579
+ # The colon is optional in this case, but our cross-language golden files
580
+ # don't include it. Here, the colon is only included if force_colon is
581
+ # set to True
582
+ out.write(':')
583
+
584
+ def PrintField(self, field, value):
585
+ """Print a single field name/value pair."""
586
+ self._PrintFieldName(field)
587
+ self.out.write(' ')
588
+ self.PrintFieldValue(field, value)
589
+ self.out.write(' ' if self.as_one_line else '\n')
590
+
591
+ def _PrintShortRepeatedPrimitivesValue(self, field, value):
592
+ """"Prints short repeated primitives value."""
593
+ # Note: this is called only when value has at least one element.
594
+ self._PrintFieldName(field)
595
+ self.out.write(' [')
596
+ for i in range(len(value) - 1):
597
+ self.PrintFieldValue(field, value[i])
598
+ self.out.write(', ')
599
+ self.PrintFieldValue(field, value[-1])
600
+ self.out.write(']')
601
+ self.out.write(' ' if self.as_one_line else '\n')
602
+
603
+ def _PrintMessageFieldValue(self, value):
604
+ if self.pointy_brackets:
605
+ openb = '<'
606
+ closeb = '>'
607
+ else:
608
+ openb = '{'
609
+ closeb = '}'
610
+
611
+ if self.as_one_line:
612
+ self.out.write('%s ' % openb)
613
+ self.PrintMessage(value)
614
+ self.out.write(closeb)
615
+ else:
616
+ self.out.write('%s\n' % openb)
617
+ self.indent += 2
618
+ self.PrintMessage(value)
619
+ self.indent -= 2
620
+ self.out.write(' ' * self.indent + closeb)
621
+
622
  def PrintFieldValue(self, field, value):
    """Print a single field value (not including name).

    For repeated fields, the value should be a single element.

    Args:
      field: The descriptor of the field to be printed.
      value: The value of the field.
    """
    out = self.out
    if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
      self._PrintMessageFieldValue(value)
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
      # Prefer the symbolic enum name; fall back to the raw number for
      # values not known to the enum descriptor.
      enum_value = field.enum_type.values_by_number.get(value, None)
      if enum_value is not None:
        out.write(enum_value.name)
      else:
        out.write(str(value))
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
      out.write('\"')
      if isinstance(value, str) and not self.as_utf8:
        out_value = value.encode('utf-8')
      else:
        out_value = value
      if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
        # We always need to escape all binary data in TYPE_BYTES fields.
        out_as_utf8 = False
      else:
        out_as_utf8 = self.as_utf8
      out.write(text_encoding.CEscape(out_value, out_as_utf8))
      out.write('\"')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
      if value:
        out.write('true')
      else:
        out.write('false')
    elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
      if self.float_format is not None:
        # Deprecated path kept for backward compatibility.
        warnings.warn(
            'float_format is deprecated for text_format. This '
            'will turn into error in 7.34.0, please remove it '
            'before that.'
        )
        out.write('{1:{0}}'.format(self.float_format, value))
      else:
        # NaN is printed as-is; other floats use the shortest round-trip
        # representation.
        if math.isnan(value):
          out.write(str(value))
        else:
          out.write(str(type_checkers.ToShortestFloat(value)))
    elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and
          self.double_format is not None):
      out.write('{1:{0}}'.format(self.double_format, value))
    else:
      # All integer types (and doubles without a custom format).
      out.write(str(value))
676
+
677
+
678
def Parse(text,
          message,
          allow_unknown_extension=False,
          allow_field_number=False,
          descriptor_pool=None,
          allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  NOTE: for historical reasons this function does not clear the input
  message, unlike the binary msg.ParseFrom(...). Values parsed from text
  are appended to repeated fields already set in *message*; parsing a
  value for an already-set singular field raises an error.

  Example::

    a = MyProto()
    a.repeated_field.append('test')
    b = MyProto()

    # Repeated fields are combined
    text_format.Parse(repr(a), b)
    text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]

    # Non-repeated fields cannot be overwritten
    a.singular_field = 1
    b.singular_field = 2
    text_format.Parse(repr(a), b) # ParseError

    # Binary version:
    b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"

  Caller is responsible for clearing the message as needed.

  Args:
    text (str): Message text representation.
    message (Message): A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    Message: The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  separator = b'\n' if isinstance(text, bytes) else u'\n'
  return ParseLines(
      text.split(separator),
      message,
      allow_unknown_extension,
      allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field)
734
+
735
+
736
def Merge(text,
          message,
          allow_unknown_extension=False,
          allow_field_number=False,
          descriptor_pool=None,
          allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  Like Parse(), except that repeated occurrences of a non-repeated field
  are permitted and the last value wins: any non-repeated, top-level field
  in *text* replaces the corresponding field in *message*.

  Args:
    text (str): Message text representation.
    message (Message): A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    Message: The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  separator = b'\n' if isinstance(text, bytes) else u'\n'
  return MergeLines(
      text.split(separator),
      message,
      allow_unknown_extension,
      allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field)
772
+
773
+
774
def ParseLines(lines,
               message,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  See Parse() for caveats.

  Args:
    lines: An iterable of lines of a message's text representation.
    message: A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  text_parser = _Parser(
      allow_unknown_extension=allow_unknown_extension,
      allow_field_number=allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field)
  return text_parser.ParseLines(lines, message)
806
+
807
+
808
def MergeLines(lines,
               message,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
  """Parses a text representation of a protocol message into a message.

  See Merge() for more details.

  Args:
    lines: An iterable of lines of a message's text representation.
    message: A protocol buffer message to merge into.
    allow_unknown_extension: if True, skip over missing extensions and keep
      parsing
    allow_field_number: if True, both field number and field name are allowed.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    allow_unknown_field: if True, skip over unknown field and keep
      parsing. Avoid to use this option if possible. It may hide some
      errors (e.g. spelling error on field name)

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On text parsing problems.
  """
  text_parser = _Parser(
      allow_unknown_extension=allow_unknown_extension,
      allow_field_number=allow_field_number,
      descriptor_pool=descriptor_pool,
      allow_unknown_field=allow_unknown_field)
  return text_parser.MergeLines(lines, message)
840
+
841
+
842
class _Parser(object):
  """Text format parser for protocol message.

  ParseLines() rejects repeated values for singular fields; MergeLines()
  accepts them and keeps the last value. Both share the same underlying
  field-merging machinery, switched by ``_allow_multiple_scalars``.
  """

  def __init__(self,
               allow_unknown_extension=False,
               allow_field_number=False,
               descriptor_pool=None,
               allow_unknown_field=False):
    self.allow_unknown_extension = allow_unknown_extension
    self.allow_field_number = allow_field_number
    self.descriptor_pool = descriptor_pool
    self.allow_unknown_field = allow_unknown_field

  def ParseLines(self, lines, message):
    """Parses a text representation of a protocol message into a message."""
    self._allow_multiple_scalars = False
    self._ParseOrMerge(lines, message)
    return message

  def MergeLines(self, lines, message):
    """Merges a text representation of a protocol message into a message."""
    self._allow_multiple_scalars = True
    self._ParseOrMerge(lines, message)
    return message

  def _ParseOrMerge(self, lines, message):
    """Converts a text representation of a protocol message into a message.

    Args:
      lines: Lines of a message's text representation.
      message: A protocol buffer message to merge into.

    Raises:
      ParseError: On text parsing problems.
    """
    # Tokenize expects native str lines.
    try:
      str_lines = (
          line if isinstance(line, str) else line.decode('utf-8')
          for line in lines)
      tokenizer = Tokenizer(str_lines)
    except UnicodeDecodeError as e:
      raise ParseError from e
    if message:
      self.root_type = message.DESCRIPTOR.full_name
    while not tokenizer.AtEnd():
      self._MergeField(tokenizer, message)

  def _MergeField(self, tokenizer, message):
    """Merges a single protocol message field into a message.

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      message: A protocol message to record the data.

    Raises:
      ParseError: In case of text parsing problems.
    """
    message_descriptor = message.DESCRIPTOR
    # Expanded Any form: "[type.googleapis.com/Foo] { ... }". The payload is
    # parsed into a fresh sub-message, then re-packed into the Any.
    if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
        tokenizer.TryConsume('[')):
      type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
      tokenizer.Consume(']')
      tokenizer.TryConsume(':')
      self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
                               type_url_prefix + '/' + packed_type_name)
      if tokenizer.TryConsume('<'):
        expanded_any_end_token = '>'
      else:
        tokenizer.Consume('{')
        expanded_any_end_token = '}'
      expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
                                                           self.descriptor_pool)
      # Direct comparison with None is used instead of implicit bool conversion
      # to avoid false positives with falsy initial values, e.g. for
      # google.protobuf.ListValue.
      if expanded_any_sub_message is None:
        raise ParseError('Type %s not found in descriptor pool' %
                         packed_type_name)
      while not tokenizer.TryConsume(expanded_any_end_token):
        if tokenizer.AtEnd():
          raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
                                                  (expanded_any_end_token,))
        self._MergeField(tokenizer, expanded_any_sub_message)
      deterministic = False

      message.Pack(expanded_any_sub_message,
                   type_url_prefix=type_url_prefix,
                   deterministic=deterministic)
      return

    if tokenizer.TryConsume('['):
      # Bracketed name: an extension field, e.g. "[my.pkg.ext_field]".
      name = [tokenizer.ConsumeIdentifier()]
      while tokenizer.TryConsume('.'):
        name.append(tokenizer.ConsumeIdentifier())
      name = '.'.join(name)

      if not message_descriptor.is_extendable:
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" does not have extensions.' %
            message_descriptor.full_name)
      # pylint: disable=protected-access
      field = message.Extensions._FindExtensionByName(name)
      # pylint: enable=protected-access
      if not field:
        if self.allow_unknown_extension:
          field = None
        else:
          raise tokenizer.ParseErrorPreviousToken(
              'Extension "%s" not registered. '
              'Did you import the _pb2 module which defines it? '
              'If you are trying to place the extension in the MessageSet '
              'field of another message that is in an Any or MessageSet field, '
              'that message\'s _pb2 module must be imported as well' % name)
      elif message_descriptor != field.containing_type:
        raise tokenizer.ParseErrorPreviousToken(
            'Extension "%s" does not extend message type "%s".' %
            (name, message_descriptor.full_name))

      tokenizer.Consume(']')

    else:
      name = tokenizer.ConsumeIdentifierOrNumber()
      if self.allow_field_number and name.isdigit():
        number = ParseInteger(name, True, True)
        field = message_descriptor.fields_by_number.get(number, None)
        if not field and message_descriptor.is_extendable:
          field = message.Extensions._FindExtensionByNumber(number)
      else:
        field = message_descriptor.fields_by_name.get(name, None)

        # Group names are expected to be capitalized as they appear in the
        # .proto file, which actually matches their type names, not their field
        # names.
        if not field:
          field = message_descriptor.fields_by_name.get(name.lower(), None)
          if field and not _IsGroupLike(field):
            field = None
          if field and field.message_type.name != name:
            field = None

      if not field and not self.allow_unknown_field:
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" has no field named "%s".' %
            (message_descriptor.full_name, name))

    if field:
      if not self._allow_multiple_scalars and field.containing_oneof:
        # Check if there's a different field set in this oneof.
        # Note that we ignore the case if the same field was set before, and we
        # apply _allow_multiple_scalars to non-scalar fields as well.
        which_oneof = message.WhichOneof(field.containing_oneof.name)
        if which_oneof is not None and which_oneof != field.name:
          raise tokenizer.ParseErrorPreviousToken(
              'Field "%s" is specified along with field "%s", another member '
              'of oneof "%s" for message type "%s".' %
              (field.name, which_oneof, field.containing_oneof.name,
               message_descriptor.full_name))

      # The colon is optional before a message value, mandatory before a
      # scalar value.
      if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        tokenizer.TryConsume(':')
        self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
                                 field.full_name)
        merger = self._MergeMessageField
      else:
        tokenizer.Consume(':')
        self._DetectSilentMarker(tokenizer, message_descriptor.full_name,
                                 field.full_name)
        merger = self._MergeScalarField

      if (field.is_repeated and
          tokenizer.TryConsume('[')):
        # Short repeated format, e.g. "foo: [1, 2, 3]"
        if not tokenizer.TryConsume(']'):
          while True:
            merger(tokenizer, message, field)
            if tokenizer.TryConsume(']'):
              break
            tokenizer.Consume(',')

      else:
        merger(tokenizer, message, field)

    else:  # Proto field is unknown.
      assert (self.allow_unknown_extension or self.allow_unknown_field)
      self._SkipFieldContents(tokenizer, name, message_descriptor.full_name)

    # For historical reasons, fields may optionally be separated by commas or
    # semicolons.
    if not tokenizer.TryConsume(','):
      tokenizer.TryConsume(';')

  def _LogSilentMarker(self, immediate_message_type, field_name):
    # Hook for subclasses that want to record occurrences of the silent
    # marker; the base parser ignores them.
    pass

  def _DetectSilentMarker(self, tokenizer, immediate_message_type, field_name):
    if tokenizer.contains_silent_marker_before_current_token:
      self._LogSilentMarker(immediate_message_type, field_name)

  def _ConsumeAnyTypeUrl(self, tokenizer):
    """Consumes a google.protobuf.Any type URL and returns the type name."""
    # Consume "type.googleapis.com/".
    prefix = [tokenizer.ConsumeIdentifier()]
    tokenizer.Consume('.')
    prefix.append(tokenizer.ConsumeIdentifier())
    tokenizer.Consume('.')
    prefix.append(tokenizer.ConsumeIdentifier())
    tokenizer.Consume('/')
    # Consume the fully-qualified type name.
    name = [tokenizer.ConsumeIdentifier()]
    while tokenizer.TryConsume('.'):
      name.append(tokenizer.ConsumeIdentifier())
    return '.'.join(prefix), '.'.join(name)

  def _MergeMessageField(self, tokenizer, message, field):
    """Merges a single message (or map entry) field into a message.

    Args:
      tokenizer: A tokenizer to parse the field value.
      message: The message of which field is a member.
      field: The descriptor of the field to be merged.

    Raises:
      ParseError: In case of text parsing problems.
    """
    is_map_entry = _IsMapEntry(field)

    if tokenizer.TryConsume('<'):
      end_token = '>'
    else:
      tokenizer.Consume('{')
      end_token = '}'

    if field.is_repeated:
      if field.is_extension:
        sub_message = message.Extensions[field].add()
      elif is_map_entry:
        sub_message = getattr(message, field.name).GetEntryClass()()
      else:
        sub_message = getattr(message, field.name).add()
    else:
      if field.is_extension:
        if (not self._allow_multiple_scalars and
            message.HasExtension(field)):
          raise tokenizer.ParseErrorPreviousToken(
              'Message type "%s" should not have multiple "%s" extensions.' %
              (message.DESCRIPTOR.full_name, field.full_name))
        sub_message = message.Extensions[field]
      else:
        # Also apply _allow_multiple_scalars to message field.
        # TODO: Change to _allow_singular_overwrites.
        if (not self._allow_multiple_scalars and
            message.HasField(field.name)):
          raise tokenizer.ParseErrorPreviousToken(
              'Message type "%s" should not have multiple "%s" fields.' %
              (message.DESCRIPTOR.full_name, field.name))
        sub_message = getattr(message, field.name)
      sub_message.SetInParent()

    while not tokenizer.TryConsume(end_token):
      if tokenizer.AtEnd():
        raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
      self._MergeField(tokenizer, sub_message)

    if is_map_entry:
      # Copy the parsed synthetic entry message into the actual map.
      value_cpptype = field.message_type.fields_by_name['value'].cpp_type
      if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
        value = getattr(message, field.name)[sub_message.key]
        value.CopyFrom(sub_message.value)
      else:
        getattr(message, field.name)[sub_message.key] = sub_message.value

  def _MergeScalarField(self, tokenizer, message, field):
    """Merges a single scalar field into a message.

    Args:
      tokenizer: A tokenizer to parse the field value.
      message: A protocol message to record the data.
      field: The descriptor of the field to be merged.

    Raises:
      ParseError: In case of text parsing problems.
      RuntimeError: On runtime errors.
    """
    _ = self.allow_unknown_extension
    value = None

    if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
                      descriptor.FieldDescriptor.TYPE_SINT32,
                      descriptor.FieldDescriptor.TYPE_SFIXED32):
      value = _ConsumeInt32(tokenizer)
    elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
                        descriptor.FieldDescriptor.TYPE_SINT64,
                        descriptor.FieldDescriptor.TYPE_SFIXED64):
      value = _ConsumeInt64(tokenizer)
    elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
                        descriptor.FieldDescriptor.TYPE_FIXED32):
      value = _ConsumeUint32(tokenizer)
    elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
                        descriptor.FieldDescriptor.TYPE_FIXED64):
      value = _ConsumeUint64(tokenizer)
    elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
                        descriptor.FieldDescriptor.TYPE_DOUBLE):
      value = tokenizer.ConsumeFloat()
    elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
      value = tokenizer.ConsumeBool()
    elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
      value = tokenizer.ConsumeString()
    elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      value = tokenizer.ConsumeByteString()
    elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
      value = tokenizer.ConsumeEnum(field)
    else:
      raise RuntimeError('Unknown field type %d' % field.type)

    if field.is_repeated:
      if field.is_extension:
        message.Extensions[field].append(value)
      else:
        getattr(message, field.name).append(value)
    else:
      if field.is_extension:
        if (not self._allow_multiple_scalars and
            field.has_presence and
            message.HasExtension(field)):
          raise tokenizer.ParseErrorPreviousToken(
              'Message type "%s" should not have multiple "%s" extensions.' %
              (message.DESCRIPTOR.full_name, field.full_name))
        else:
          message.Extensions[field] = value
      else:
        duplicate_error = False
        if not self._allow_multiple_scalars:
          if field.has_presence:
            duplicate_error = message.HasField(field.name)
          else:
            # For field that doesn't represent presence, try best effort to
            # check multiple scalars by compare to default values.
            duplicate_error = not decoder.IsDefaultScalarValue(
                getattr(message, field.name)
            )

        if duplicate_error:
          raise tokenizer.ParseErrorPreviousToken(
              'Message type "%s" should not have multiple "%s" fields.' %
              (message.DESCRIPTOR.full_name, field.name))
        else:
          setattr(message, field.name, value)

  def _SkipFieldContents(self, tokenizer, field_name, immediate_message_type):
    """Skips over contents (value or message) of a field.

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      field_name: The field name currently being parsed.
      immediate_message_type: The type of the message immediately containing
        the silent marker.
    """
    # Try to guess the type of this field.
    # If this field is not a message, there should be a ":" between the
    # field name and the field value and also the field value should not
    # start with "{" or "<" which indicates the beginning of a message body.
    # If there is no ":" or there is a "{" or "<" after ":", this field has
    # to be a message or the input is ill-formed.
    if tokenizer.TryConsume(
        ':') and not tokenizer.LookingAt('{') and not tokenizer.LookingAt('<'):
      self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
      if tokenizer.LookingAt('['):
        self._SkipRepeatedFieldValue(tokenizer, immediate_message_type)
      else:
        self._SkipFieldValue(tokenizer)
    else:
      self._DetectSilentMarker(tokenizer, immediate_message_type, field_name)
      self._SkipFieldMessage(tokenizer, immediate_message_type)

  def _SkipField(self, tokenizer, immediate_message_type):
    """Skips over a complete field (name and value/message).

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      immediate_message_type: The type of the message immediately containing
        the silent marker.
    """
    field_name = ''
    if tokenizer.TryConsume('['):
      # Consume extension or google.protobuf.Any type URL
      field_name += '[' + tokenizer.ConsumeIdentifier()
      num_identifiers = 1
      while tokenizer.TryConsume('.'):
        field_name += '.' + tokenizer.ConsumeIdentifier()
        num_identifiers += 1
      # This is possibly a type URL for an Any message.
      if num_identifiers == 3 and tokenizer.TryConsume('/'):
        field_name += '/' + tokenizer.ConsumeIdentifier()
        while tokenizer.TryConsume('.'):
          field_name += '.' + tokenizer.ConsumeIdentifier()
      tokenizer.Consume(']')
      field_name += ']'
    else:
      field_name += tokenizer.ConsumeIdentifierOrNumber()

    self._SkipFieldContents(tokenizer, field_name, immediate_message_type)

    # For historical reasons, fields may optionally be separated by commas or
    # semicolons.
    if not tokenizer.TryConsume(','):
      tokenizer.TryConsume(';')

  def _SkipFieldMessage(self, tokenizer, immediate_message_type):
    """Skips over a field message.

    Args:
      tokenizer: A tokenizer to parse the field name and values.
      immediate_message_type: The type of the message immediately containing
        the silent marker
    """
    if tokenizer.TryConsume('<'):
      delimiter = '>'
    else:
      tokenizer.Consume('{')
      delimiter = '}'

    while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
      self._SkipField(tokenizer, immediate_message_type)

    tokenizer.Consume(delimiter)

  def _SkipFieldValue(self, tokenizer):
    """Skips over a field value.

    Args:
      tokenizer: A tokenizer to parse the field name and values.

    Raises:
      ParseError: In case an invalid field value is found.
    """
    if (not tokenizer.TryConsumeByteString() and
        not tokenizer.TryConsumeIdentifier() and
        not _TryConsumeInt64(tokenizer) and
        not _TryConsumeUint64(tokenizer) and
        not tokenizer.TryConsumeFloat()):
      raise ParseError('Invalid field value: ' + tokenizer.token)

  def _SkipRepeatedFieldValue(self, tokenizer, immediate_message_type):
    """Skips over a repeated field value.

    Args:
      tokenizer: A tokenizer to parse the field value.
      immediate_message_type: The type of the message immediately containing
        the silent marker.
    """
    tokenizer.Consume('[')
    if not tokenizer.TryConsume(']'):
      while True:
        if tokenizer.LookingAt('<') or tokenizer.LookingAt('{'):
          self._SkipFieldMessage(tokenizer, immediate_message_type)
        else:
          self._SkipFieldValue(tokenizer)
        if tokenizer.TryConsume(']'):
          break
        tokenizer.Consume(',')
1301
+
1302
+
1303
class Tokenizer(object):
  """Protocol buffer text representation tokenizer.

  This class handles the lower level string parsing by splitting it into
  meaningful tokens.

  It was directly ported from the Java protocol buffer API.
  """

  # Whitespace run (used when comments are kept as tokens).
  _WHITESPACE = re.compile(r'\s+')
  # A '#' comment extending to the end of a line.
  _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE)
  # Whitespace and/or comments, consumed together when skipping comments.
  _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE)
  _TOKEN = re.compile('|'.join([
      r'[a-zA-Z_][0-9a-zA-Z_+-]*',  # an identifier
      r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*',  # a number
  ] + [  # quoted str for each quote mark
      # Avoid backtracking! https://stackoverflow.com/a/844267
      r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark)
      for mark in _QUOTES
  ]))

  _IDENTIFIER = re.compile(r'[^\d\W]\w*')
  _IDENTIFIER_OR_NUMBER = re.compile(r'\w+')

  def __init__(self, lines, skip_comments=True):
    """Initializes the tokenizer and reads the first token.

    Args:
      lines: An iterable of text lines to tokenize.
      skip_comments: If True (the default), '#' comments are treated as
        whitespace; if False, they are returned as tokens.
    """
    self._position = 0
    self._line = -1  # Index of the current line within `lines`.
    self._column = 0  # Column of the next character to scan.
    self._token_start = None
    self.token = ''  # The current (look-ahead) token; '' means end of input.
    self._lines = iter(lines)
    self._current_line = ''
    # Position of the previously consumed token, kept for error reporting
    # (see ParseErrorPreviousToken / ConsumeCommentOrTrailingComment).
    self._previous_line = 0
    self._previous_column = 0
    self._more_lines = True
    self._skip_comments = skip_comments
    self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
                                or self._WHITESPACE)
    self.contains_silent_marker_before_current_token = False

    # Prime the look-ahead with the first real token.
    self._SkipWhitespace()
    self.NextToken()

  def LookingAt(self, token):
    """Returns True iff the current token equals `token` (not consumed)."""
    return self.token == token

  def AtEnd(self):
    """Checks the end of the text was reached.

    Returns:
      True iff the end was reached.
    """
    return not self.token

  def _PopLine(self):
    # Advances to the next input line whenever the scan position has moved
    # past the end of the current one; clears _more_lines at end of input.
    while len(self._current_line) <= self._column:
      try:
        self._current_line = next(self._lines)
      except StopIteration:
        self._current_line = ''
        self._more_lines = False
        return
      else:
        self._line += 1
        self._column = 0

  def _SkipWhitespace(self):
    # Skips whitespace (and comments, when configured), recording whether
    # the skipped run was exactly the debug-string silent marker.
    while True:
      self._PopLine()
      match = self._whitespace_pattern.match(self._current_line, self._column)
      if not match:
        break
      self.contains_silent_marker_before_current_token = match.group(0) == (
          ' ' + _DEBUG_STRING_SILENT_MARKER)
      length = len(match.group(0))
      self._column += length

  def TryConsume(self, token):
    """Tries to consume a given piece of text.

    Args:
      token: Text to consume.

    Returns:
      True iff the text was consumed.
    """
    if self.token == token:
      self.NextToken()
      return True
    return False

  def Consume(self, token):
    """Consumes a piece of text.

    Args:
      token: Text to consume.

    Raises:
      ParseError: If the text couldn't be consumed.
    """
    if not self.TryConsume(token):
      raise self.ParseError('Expected "%s".' % token)

  def ConsumeComment(self):
    """Consumes a comment token and returns its text.

    Returns:
      The comment text, including the leading '#'.

    Raises:
      ParseError: If the current token is not a comment.
    """
    result = self.token
    if not self._COMMENT.match(result):
      raise self.ParseError('Expected comment.')
    self.NextToken()
    return result

  def ConsumeCommentOrTrailingComment(self):
    """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""

    # Tokenizer initializes _previous_line and _previous_column to 0. As the
    # tokenizer starts, it looks like there is a previous token on the line.
    just_started = self._line == 0 and self._column == 0

    before_parsing = self._previous_line
    comment = self.ConsumeComment()

    # A trailing comment is a comment on the same line than the previous token.
    trailing = (self._previous_line == before_parsing
                and not just_started)

    return trailing, comment

  def TryConsumeIdentifier(self):
    """Returns True and advances iff the current token is an identifier."""
    try:
      self.ConsumeIdentifier()
      return True
    except ParseError:
      return False

  def ConsumeIdentifier(self):
    """Consumes protocol message field identifier.

    Returns:
      Identifier string.

    Raises:
      ParseError: If an identifier couldn't be consumed.
    """
    result = self.token
    if not self._IDENTIFIER.match(result):
      raise self.ParseError('Expected identifier.')
    self.NextToken()
    return result

  def TryConsumeIdentifierOrNumber(self):
    """Returns True and advances iff the token is an identifier or number."""
    try:
      self.ConsumeIdentifierOrNumber()
      return True
    except ParseError:
      return False

  def ConsumeIdentifierOrNumber(self):
    """Consumes protocol message field identifier.

    Returns:
      Identifier string.

    Raises:
      ParseError: If an identifier couldn't be consumed.
    """
    result = self.token
    if not self._IDENTIFIER_OR_NUMBER.match(result):
      raise self.ParseError('Expected identifier or number, got %s.' % result)
    self.NextToken()
    return result

  def TryConsumeInteger(self):
    """Returns True and advances iff the current token is an integer."""
    try:
      self.ConsumeInteger()
      return True
    except ParseError:
      return False

  def ConsumeInteger(self):
    """Consumes an integer number.

    Returns:
      The integer parsed.

    Raises:
      ParseError: If an integer couldn't be consumed.
    """
    try:
      result = _ParseAbstractInteger(self.token)
    except ValueError as e:
      raise self.ParseError(str(e))
    self.NextToken()
    return result

  def TryConsumeFloat(self):
    """Returns True and advances iff the current token is a float."""
    try:
      self.ConsumeFloat()
      return True
    except ParseError:
      return False

  def ConsumeFloat(self):
    """Consumes an floating point number.

    Returns:
      The number parsed.

    Raises:
      ParseError: If a floating point number couldn't be consumed.
    """
    try:
      result = ParseFloat(self.token)
    except ValueError as e:
      raise self.ParseError(str(e))
    self.NextToken()
    return result

  def ConsumeBool(self):
    """Consumes a boolean value.

    Returns:
      The bool parsed.

    Raises:
      ParseError: If a boolean value couldn't be consumed.
    """
    try:
      result = ParseBool(self.token)
    except ValueError as e:
      raise self.ParseError(str(e))
    self.NextToken()
    return result

  def TryConsumeByteString(self):
    """Returns True and advances iff the current token starts a string."""
    try:
      self.ConsumeByteString()
      return True
    except ParseError:
      return False

  def ConsumeString(self):
    """Consumes a string value.

    Returns:
      The string parsed.

    Raises:
      ParseError: If a string value couldn't be consumed.
    """
    the_bytes = self.ConsumeByteString()
    try:
      return str(the_bytes, 'utf-8')
    except UnicodeDecodeError as e:
      raise self._StringParseError(e)

  def ConsumeByteString(self):
    """Consumes a byte array value.

    Returns:
      The array parsed (as a string).

    Raises:
      ParseError: If a byte array value couldn't be consumed.
    """
    # Adjacent string literals are concatenated, as in C/Python.
    the_list = [self._ConsumeSingleByteString()]
    while self.token and self.token[0] in _QUOTES:
      the_list.append(self._ConsumeSingleByteString())
    return b''.join(the_list)

  def _ConsumeSingleByteString(self):
    """Consume one token of a string literal.

    String literals (whether bytes or text) can come in multiple adjacent
    tokens which are automatically concatenated, like in C or Python. This
    method only consumes one token.

    Returns:
      The token parsed.
    Raises:
      ParseError: When the wrong format data is found.
    """
    text = self.token
    if len(text) < 1 or text[0] not in _QUOTES:
      raise self.ParseError('Expected string but found: %r' % (text,))

    if len(text) < 2 or text[-1] != text[0]:
      raise self.ParseError('String missing ending quote: %r' % (text,))

    try:
      result = text_encoding.CUnescape(text[1:-1])
    except ValueError as e:
      raise self.ParseError(str(e))
    self.NextToken()
    return result

  def ConsumeEnum(self, field):
    """Consumes an enum value (name or number) for the given field.

    Args:
      field: Enum field descriptor.

    Returns:
      The enum value number.

    Raises:
      ParseError: If the token is not a valid value for the enum.
    """
    try:
      result = ParseEnum(field, self.token)
    except ValueError as e:
      raise self.ParseError(str(e))
    self.NextToken()
    return result

  def ParseErrorPreviousToken(self, message):
    """Creates and *returns* a ParseError for the previously read token.

    Args:
      message: A message to set for the exception.

    Returns:
      A ParseError instance.
    """
    return ParseError(message, self._previous_line + 1,
                      self._previous_column + 1)

  def ParseError(self, message):
    """Creates and *returns* a ParseError for the current token."""
    return ParseError('\'' + self._current_line + '\': ' + message,
                      self._line + 1, self._column + 1)

  def _StringParseError(self, e):
    # Wraps a UnicodeDecodeError into a positioned ParseError.
    return self.ParseError('Couldn\'t parse string: ' + str(e))

  def NextToken(self):
    """Reads the next meaningful token."""
    self._previous_line = self._line
    self._previous_column = self._column
    self.contains_silent_marker_before_current_token = False

    self._column += len(self.token)
    self._SkipWhitespace()

    if not self._more_lines:
      self.token = ''
      return

    match = self._TOKEN.match(self._current_line, self._column)
    if not match and not self._skip_comments:
      match = self._COMMENT.match(self._current_line, self._column)
    if match:
      token = match.group(0)
      self.token = token
    else:
      # No recognizable token: fall back to a single character so Consume()
      # produces a useful error for unexpected punctuation.
      self.token = self._current_line[self._column]
1646
+
1647
# Aliased so it can still be accessed by current visibility violators.
# TODO: Migrate violators to textformat_tokenizer.
_Tokenizer = Tokenizer  # pylint: disable=invalid-name
1650
+
1651
+
1652
def _ConsumeInt32(tokenizer):
  """Reads a signed 32-bit integer token from the tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The parsed integer value.

  Raises:
    ParseError: If the next token is not a valid signed 32bit integer.
  """
  return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
1665
+
1666
+
1667
def _ConsumeUint32(tokenizer):
  """Reads an unsigned 32-bit integer token from the tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The parsed integer value.

  Raises:
    ParseError: If the next token is not a valid unsigned 32bit integer.
  """
  return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
1680
+
1681
+
1682
def _TryConsumeInt64(tokenizer):
  """Returns True and consumes the token iff it is a signed 64-bit integer."""
  try:
    _ConsumeInt64(tokenizer)
  except ParseError:
    return False
  return True
1688
+
1689
+
1690
def _ConsumeInt64(tokenizer):
  """Consumes a signed 64bit integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If a signed 64bit integer couldn't be consumed.
  """
  return _ConsumeInteger(tokenizer, is_signed=True, is_long=True)
1703
+
1704
+
1705
def _TryConsumeUint64(tokenizer):
  """Returns True and consumes the token iff it is an unsigned 64-bit integer."""
  try:
    _ConsumeUint64(tokenizer)
  except ParseError:
    return False
  return True
1711
+
1712
+
1713
def _ConsumeUint64(tokenizer):
  """Reads an unsigned 64-bit integer token from the tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The parsed integer value.

  Raises:
    ParseError: If the next token is not a valid unsigned 64bit integer.
  """
  return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
1726
+
1727
+
1728
def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
  """Consumes an integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.
    is_signed: True if a signed integer must be parsed.
    is_long: True if a long integer must be parsed.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If an integer with given characteristics couldn't be consumed.
  """
  token_text = tokenizer.token
  try:
    value = ParseInteger(token_text, is_signed=is_signed, is_long=is_long)
  except ValueError as err:
    # Re-raise with the tokenizer's position information attached.
    raise tokenizer.ParseError(str(err))
  tokenizer.NextToken()
  return value
1748
+
1749
+
1750
def ParseInteger(text, is_signed=False, is_long=False):
  """Parses an integer.

  Args:
    text: The text to parse.
    is_signed: True if a signed integer must be parsed.
    is_long: True if a long integer must be parsed.

  Returns:
    The integer value.

  Raises:
    ValueError: Thrown Iff the text is not a valid integer.
  """
  # Do the actual parsing. Exception handling is propagated to caller.
  value = _ParseAbstractInteger(text)

  # Select the bounds checker for the requested width/signedness and let it
  # raise ValueError for out-of-range values (handled by callers).
  checker_index = 2 * int(is_long) + int(is_signed)
  _INTEGER_CHECKERS[checker_index].CheckValue(value)
  return value
1771
+
1772
+
1773
+ def _ParseAbstractInteger(text):
1774
+ """Parses an integer without checking size/signedness.
1775
+
1776
+ Args:
1777
+ text: The text to parse.
1778
+
1779
+ Returns:
1780
+ The integer value.
1781
+
1782
+ Raises:
1783
+ ValueError: Thrown Iff the text is not a valid integer.
1784
+ """
1785
+ # Do the actual parsing. Exception handling is propagated to caller.
1786
+ orig_text = text
1787
+ c_octal_match = re.match(r'(-?)0(\d+)$', text)
1788
+ if c_octal_match:
1789
+ # Python 3 no longer supports 0755 octal syntax without the 'o', so
1790
+ # we always use the '0o' prefix for multi-digit numbers starting with 0.
1791
+ text = c_octal_match.group(1) + '0o' + c_octal_match.group(2)
1792
+ try:
1793
+ return int(text, 0)
1794
+ except ValueError:
1795
+ raise ValueError('Couldn\'t parse integer: %s' % orig_text)
1796
+
1797
+
1798
def ParseFloat(text):
  """Parse a floating point number.

  Args:
    text: Text to parse.

  Returns:
    The number parsed.

  Raises:
    ValueError: If a floating point number couldn't be parsed.
  """
  # Octal-prefixed floats are rejected outright.
  if _FLOAT_OCTAL_PREFIX.match(text):
    raise ValueError('Invalid octal float: %s' % text)
  try:
    # Assume Python compatible syntax.
    return float(text)
  except ValueError:
    pass
  # Check alternative proto text-format spellings.
  if _FLOAT_INFINITY.match(text):
    return float('-inf') if text[0] == '-' else float('inf')
  if _FLOAT_NAN.match(text):
    return float('nan')
  # Assume a C-style '1.0f' suffix.
  try:
    return float(text.rstrip('fF'))
  except ValueError:
    raise ValueError("Couldn't parse float: %s" % text)
1830
+
1831
+
1832
def ParseBool(text):
  """Parse a boolean value.

  Args:
    text: Text to parse.

  Returns:
    Boolean values parsed

  Raises:
    ValueError: If text is not a valid boolean.
  """
  # The text format accepts several spellings for each truth value.
  if text in {'true', 't', '1', 'True'}:
    return True
  if text in {'false', 'f', '0', 'False'}:
    return False
  raise ValueError('Expected "true" or "false".')
1850
+
1851
+
1852
def ParseEnum(field, value):
  """Parse an enum value.

  The value can be specified by a number (the enum value), or by
  a string literal (the enum name).

  Args:
    field: Enum field descriptor.
    value: String value.

  Returns:
    Enum value number.

  Raises:
    ValueError: If the enum value could not be parsed.
  """
  enum_descriptor = field.enum_type
  try:
    number = int(value, 0)
  except ValueError:
    # Not numeric: treat the value as an enum name.
    enum_value = enum_descriptor.values_by_name.get(value, None)
    if enum_value is None:
      raise ValueError('Enum type "%s" has no value named %s.' %
                       (enum_descriptor.full_name, value))
    return enum_value.number
  # Open (non-closed) enums accept any numeric value, known or not.
  if not field.enum_type.is_closed:
    return number
  enum_value = enum_descriptor.values_by_number.get(number, None)
  if enum_value is None:
    raise ValueError('Enum type "%s" has no value with number %d.' %
                     (enum_descriptor.full_name, number))
  return enum_value.number