ZTWHHH committed on
Commit
92b0408
·
verified ·
1 Parent(s): 863a7ed

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +4 -0
  2. parrot/lib/libbz2.a +3 -0
  3. parrot/lib/libbz2.so.1.0 +3 -0
  4. parrot/lib/liblzma.so.5.6.4 +3 -0
  5. parrot/lib/libtinfow.a +3 -0
  6. parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/__init__.cpython-310.pyc +0 -0
  7. parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/field_mask_pb2.cpython-310.pyc +0 -0
  8. parrot/lib/python3.10/site-packages/google/protobuf/api_pb2.py +43 -0
  9. parrot/lib/python3.10/site-packages/google/protobuf/descriptor_database.py +154 -0
  10. parrot/lib/python3.10/site-packages/google/protobuf/descriptor_pb2.py +0 -0
  11. parrot/lib/python3.10/site-packages/google/protobuf/descriptor_pool.py +1355 -0
  12. parrot/lib/python3.10/site-packages/google/protobuf/duration_pb2.py +37 -0
  13. parrot/lib/python3.10/site-packages/google/protobuf/empty_pb2.py +37 -0
  14. parrot/lib/python3.10/site-packages/google/protobuf/field_mask_pb2.py +37 -0
  15. parrot/lib/python3.10/site-packages/google/protobuf/internal/__init__.py +7 -0
  16. parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-310.pyc +0 -0
  17. parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-310.pyc +0 -0
  18. parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-310.pyc +0 -0
  19. parrot/lib/python3.10/site-packages/google/protobuf/internal/_parameterized.py +420 -0
  20. parrot/lib/python3.10/site-packages/google/protobuf/internal/api_implementation.py +142 -0
  21. parrot/lib/python3.10/site-packages/google/protobuf/internal/builder.py +117 -0
  22. parrot/lib/python3.10/site-packages/google/protobuf/internal/containers.py +677 -0
  23. parrot/lib/python3.10/site-packages/google/protobuf/internal/decoder.py +1036 -0
  24. parrot/lib/python3.10/site-packages/google/protobuf/internal/encoder.py +806 -0
  25. parrot/lib/python3.10/site-packages/google/protobuf/internal/enum_type_wrapper.py +112 -0
  26. parrot/lib/python3.10/site-packages/google/protobuf/internal/extension_dict.py +194 -0
  27. parrot/lib/python3.10/site-packages/google/protobuf/internal/field_mask.py +310 -0
  28. parrot/lib/python3.10/site-packages/google/protobuf/internal/message_listener.py +55 -0
  29. parrot/lib/python3.10/site-packages/google/protobuf/internal/python_edition_defaults.py +5 -0
  30. parrot/lib/python3.10/site-packages/google/protobuf/internal/python_message.py +1580 -0
  31. parrot/lib/python3.10/site-packages/google/protobuf/internal/testing_refleaks.py +119 -0
  32. parrot/lib/python3.10/site-packages/google/protobuf/internal/type_checkers.py +408 -0
  33. parrot/lib/python3.10/site-packages/google/protobuf/internal/well_known_types.py +678 -0
  34. parrot/lib/python3.10/site-packages/google/protobuf/internal/wire_format.py +245 -0
  35. parrot/lib/python3.10/site-packages/google/protobuf/json_format.py +1069 -0
  36. parrot/lib/python3.10/site-packages/google/protobuf/message.py +422 -0
  37. parrot/lib/python3.10/site-packages/google/protobuf/message_factory.py +237 -0
  38. parrot/lib/python3.10/site-packages/google/protobuf/proto_builder.py +111 -0
  39. parrot/lib/python3.10/site-packages/google/protobuf/proto_json.py +83 -0
  40. parrot/lib/python3.10/site-packages/google/protobuf/pyext/__init__.py +0 -0
  41. parrot/lib/python3.10/site-packages/google/protobuf/pyext/__pycache__/__init__.cpython-310.pyc +0 -0
  42. parrot/lib/python3.10/site-packages/google/protobuf/pyext/__pycache__/cpp_message.cpython-310.pyc +0 -0
  43. parrot/lib/python3.10/site-packages/google/protobuf/pyext/cpp_message.py +49 -0
  44. parrot/lib/python3.10/site-packages/google/protobuf/runtime_version.py +123 -0
  45. parrot/lib/python3.10/site-packages/google/protobuf/service.py +213 -0
  46. parrot/lib/python3.10/site-packages/google/protobuf/source_context_pb2.py +37 -0
  47. parrot/lib/python3.10/site-packages/google/protobuf/struct_pb2.py +47 -0
  48. parrot/lib/python3.10/site-packages/google/protobuf/symbol_database.py +197 -0
  49. parrot/lib/python3.10/site-packages/google/protobuf/testdata/__init__.py +0 -0
  50. parrot/lib/python3.10/site-packages/google/protobuf/testdata/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -86,3 +86,7 @@ parrot/lib/libtinfo.so filter=lfs diff=lfs merge=lfs -text
86
  parrot/lib/liblzma.so filter=lfs diff=lfs merge=lfs -text
87
  parrot/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
88
  parrot/lib/libform.a filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
86
  parrot/lib/liblzma.so filter=lfs diff=lfs merge=lfs -text
87
  parrot/lib/libtinfow.so.6.4 filter=lfs diff=lfs merge=lfs -text
88
  parrot/lib/libform.a filter=lfs diff=lfs merge=lfs -text
89
+ parrot/lib/libbz2.so.1.0 filter=lfs diff=lfs merge=lfs -text
90
+ parrot/lib/liblzma.so.5.6.4 filter=lfs diff=lfs merge=lfs -text
91
+ parrot/lib/libbz2.a filter=lfs diff=lfs merge=lfs -text
92
+ parrot/lib/libtinfow.a filter=lfs diff=lfs merge=lfs -text
parrot/lib/libbz2.a ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4377dc3d8f7542568b6365cd6bb06970b53c20e9a71b7d54271874f7868be500
3
+ size 264138
parrot/lib/libbz2.so.1.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4979469ae49ac144f62202f75bbdd69b17197aedb879d633337c8cf7e4aba301
3
+ size 229016
parrot/lib/liblzma.so.5.6.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6b126e186cc43702dcacb626e0455f8aecf030d969d6d4a407ce4cc35293c503
3
+ size 218304
parrot/lib/libtinfow.a ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f0881f962753efab7fc0d5328dfcb6edbb8e71b93cfa3f90621f0ad9b467ea0
3
+ size 489850
parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (189 Bytes). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/__pycache__/field_mask_pb2.cpython-310.pyc ADDED
Binary file (1.44 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/api_pb2.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/api.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fail fast if the installed protobuf runtime is incompatible with the
# gencode version recorded below (5.28.2).
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    5,
    28,
    2,
    '',
    'google/protobuf/api.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2


# Serialized FileDescriptorProto for google/protobuf/api.proto.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\xc1\x02\n\x03\x41pi\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x31\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.MethodR\x07methods\x12\x31\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12\x18\n\x07version\x18\x04 \x01(\tR\x07version\x12\x45\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContextR\rsourceContext\x12.\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.MixinR\x06mixins\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"\xb2\x02\n\x06Method\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12(\n\x10request_type_url\x18\x02 \x01(\tR\x0erequestTypeUrl\x12+\n\x11request_streaming\x18\x03 \x01(\x08R\x10requestStreaming\x12*\n\x11response_type_url\x18\x04 \x01(\tR\x0fresponseTypeUrl\x12-\n\x12response_streaming\x18\x05 \x01(\x08R\x11responseStreaming\x12\x31\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.OptionR\x07options\x12/\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.SyntaxR\x06syntax\"/\n\x05Mixin\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x12\n\x04root\x18\x02 \x01(\tR\x04rootBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypes\x62\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: record options and serialized offsets directly.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_API']._serialized_start=113
  _globals['_API']._serialized_end=434
  _globals['_METHOD']._serialized_start=437
  _globals['_METHOD']._serialized_end=743
  _globals['_MIXIN']._serialized_start=745
  _globals['_MIXIN']._serialized_end=792
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/descriptor_database.py ADDED
@@ -0,0 +1,154 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides a container for DescriptorProtos."""
9
+
10
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
11
+
12
+ import warnings
13
+
14
+
class Error(Exception):
  """Base exception for errors raised by this module."""
  pass
17
+
18
+
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor."""
21
+
22
+
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # Maps file name (str) -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Maps fully qualified top-level symbol name (str) -> FileDescriptorProto.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    if proto_name not in self._file_desc_protos_by_file:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)
    else:
      # Identical proto re-added: nothing to do (and no re-indexing needed).
      return

    # Add all the top-level descriptors to the index.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      for name in _ExtractSymbols(message, package):
        self._AddSymbol(name, file_desc_proto)
    for enum in file_desc_proto.enum_type:
      self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
      # Enum values of a top-level enum are siblings of the enum, so they are
      # indexed under the package directly (no conflict warning is issued).
      for enum_value in enum.value:
        self._file_desc_protos_by_symbol[
            '.'.join((package, enum_value.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
    for service in file_desc_proto.service:
      self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'
    'some.package.name.Message.some_field'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    try:
      return self._file_desc_protos_by_symbol[symbol]
    except KeyError:
      # Fields, enum values, and nested extensions are not in
      # _file_desc_protos_by_symbol. Try to find the top level
      # descriptor. Non-existent nested symbol under a valid top level
      # descriptor can also be found. The behavior is the same with
      # protobuf C++.
      top_level, _, _ = symbol.rpartition('.')
      try:
        return self._file_desc_protos_by_symbol[top_level]
      except KeyError:
        # Raise the original symbol as a KeyError for better diagnostics.
        raise KeyError(symbol)

  def FindFileContainingExtension(self, extendee_name, extension_number):
    # TODO: implement this API.
    return None

  def FindAllExtensionNumbers(self, extendee_name):
    # TODO: implement this API.
    return []

  def _AddSymbol(self, name, file_desc_proto):
    # A duplicate registration only warns; the newest file wins the slot.
    if name in self._file_desc_protos_by_symbol:
      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
                  '": ' + name +
                  ' is already defined in file "' +
                  self._file_desc_protos_by_symbol[name].name + '"')
      warnings.warn(warn_msg, RuntimeWarning)
    self._file_desc_protos_by_symbol[name] = file_desc_proto
136
+
137
+
138
+ def _ExtractSymbols(desc_proto, package):
139
+ """Pulls out all the symbols from a descriptor proto.
140
+
141
+ Args:
142
+ desc_proto: The proto to extract symbols from.
143
+ package: The package containing the descriptor type.
144
+
145
+ Yields:
146
+ The fully qualified name found in the descriptor.
147
+ """
148
+ message_name = package + '.' + desc_proto.name if package else desc_proto.name
149
+ yield message_name
150
+ for nested_type in desc_proto.nested_type:
151
+ for symbol in _ExtractSymbols(nested_type, message_name):
152
+ yield symbol
153
+ for enum_type in desc_proto.enum_type:
154
+ yield '.'.join((message_name, enum_type.name))
parrot/lib/python3.10/site-packages/google/protobuf/descriptor_pb2.py ADDED
The diff for this file is too large to render. See raw diff
 
parrot/lib/python3.10/site-packages/google/protobuf/descriptor_pool.py ADDED
@@ -0,0 +1,1355 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides DescriptorPool to use as a container for proto2 descriptors.
9
+
10
+ The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
11
+ a collection of protocol buffer descriptors for use when dynamically creating
12
+ message types at runtime.
13
+
14
+ For most applications protocol buffers should be used via modules generated by
15
+ the protocol buffer compiler tool. This should only be used when the type of
16
+ protocol buffers used in an application or library cannot be predetermined.
17
+
18
+ Below is a straightforward example on how to use this class::
19
+
20
+ pool = DescriptorPool()
21
+ file_descriptor_protos = [ ... ]
22
+ for file_descriptor_proto in file_descriptor_protos:
23
+ pool.Add(file_descriptor_proto)
24
+ my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
25
+
26
+ The message descriptor can be used in conjunction with the message_factory
27
+ module in order to create a protocol buffer class that can be encoded and
28
+ decoded.
29
+
30
+ If you want to get a Python class for the specified proto, use the
31
+ helper functions inside google.protobuf.message_factory
32
+ directly instead of this class.
33
+ """
34
+
35
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
36
+
37
+ import collections
38
+ import threading
39
+ import warnings
40
+
41
+ from google.protobuf import descriptor
42
+ from google.protobuf import descriptor_database
43
+ from google.protobuf import text_encoding
44
+ from google.protobuf.internal import python_edition_defaults
45
+ from google.protobuf.internal import python_message
46
+
47
+ _USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
48
+
49
+
50
+ def _NormalizeFullyQualifiedName(name):
51
+ """Remove leading period from fully-qualified type name.
52
+
53
+ Due to b/13860351 in descriptor_database.py, types in the root namespace are
54
+ generated with a leading period. This function removes that prefix.
55
+
56
+ Args:
57
+ name (str): The fully-qualified symbol name.
58
+
59
+ Returns:
60
+ str: The normalized fully-qualified symbol name.
61
+ """
62
+ return name.lstrip('.')
63
+
64
+
65
+ def _OptionsOrNone(descriptor_proto):
66
+ """Returns the value of the field `options`, or None if it is not set."""
67
+ if descriptor_proto.HasField('options'):
68
+ return descriptor_proto.options
69
+ else:
70
+ return None
71
+
72
+
def _IsMessageSetExtension(field):
  # True iff `field` is a MessageSet-style extension: an extension field whose
  # containing type has the `message_set_wire_format` option set, and which is
  # itself an optional message-typed field. The `and` chain short-circuits, so
  # option lookup only happens for actual extensions with options present.
  return (field.is_extension and
          field.containing_type.has_options and
          field.containing_type.GetOptions().message_set_wire_format and
          field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
          field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
79
+
80
+ _edition_defaults_lock = threading.Lock()
81
+
82
+
83
+ class DescriptorPool(object):
84
+ """A collection of protobufs dynamically constructed by descriptor protos."""
85
+
86
+ if _USE_C_DESCRIPTORS:
87
+
88
+ def __new__(cls, descriptor_db=None):
89
+ # pylint: disable=protected-access
90
+ return descriptor._message.DescriptorPool(descriptor_db)
91
+
92
+ def __init__(
93
+ self, descriptor_db=None, use_deprecated_legacy_json_field_conflicts=False
94
+ ):
95
+ """Initializes a Pool of proto buffs.
96
+
97
+ The descriptor_db argument to the constructor is provided to allow
98
+ specialized file descriptor proto lookup code to be triggered on demand. An
99
+ example would be an implementation which will read and compile a file
100
+ specified in a call to FindFileByName() and not require the call to Add()
101
+ at all. Results from this database will be cached internally here as well.
102
+
103
+ Args:
104
+ descriptor_db: A secondary source of file descriptors.
105
+ use_deprecated_legacy_json_field_conflicts: Unused, for compatibility with
106
+ C++.
107
+ """
108
+
109
+ self._internal_db = descriptor_database.DescriptorDatabase()
110
+ self._descriptor_db = descriptor_db
111
+ self._descriptors = {}
112
+ self._enum_descriptors = {}
113
+ self._service_descriptors = {}
114
+ self._file_descriptors = {}
115
+ self._toplevel_extensions = {}
116
+ self._top_enum_values = {}
117
+ # We store extensions in two two-level mappings: The first key is the
118
+ # descriptor of the message being extended, the second key is the extension
119
+ # full name or its tag number.
120
+ self._extensions_by_name = collections.defaultdict(dict)
121
+ self._extensions_by_number = collections.defaultdict(dict)
122
+ self._serialized_edition_defaults = (
123
+ python_edition_defaults._PROTOBUF_INTERNAL_PYTHON_EDITION_DEFAULTS
124
+ )
125
+ self._edition_defaults = None
126
+ self._feature_cache = dict()
127
+
128
+ def _CheckConflictRegister(self, desc, desc_name, file_name):
129
+ """Check if the descriptor name conflicts with another of the same name.
130
+
131
+ Args:
132
+ desc: Descriptor of a message, enum, service, extension or enum value.
133
+ desc_name (str): the full name of desc.
134
+ file_name (str): The file name of descriptor.
135
+ """
136
+ for register, descriptor_type in [
137
+ (self._descriptors, descriptor.Descriptor),
138
+ (self._enum_descriptors, descriptor.EnumDescriptor),
139
+ (self._service_descriptors, descriptor.ServiceDescriptor),
140
+ (self._toplevel_extensions, descriptor.FieldDescriptor),
141
+ (self._top_enum_values, descriptor.EnumValueDescriptor)]:
142
+ if desc_name in register:
143
+ old_desc = register[desc_name]
144
+ if isinstance(old_desc, descriptor.EnumValueDescriptor):
145
+ old_file = old_desc.type.file.name
146
+ else:
147
+ old_file = old_desc.file.name
148
+
149
+ if not isinstance(desc, descriptor_type) or (
150
+ old_file != file_name):
151
+ error_msg = ('Conflict register for file "' + file_name +
152
+ '": ' + desc_name +
153
+ ' is already defined in file "' +
154
+ old_file + '". Please fix the conflict by adding '
155
+ 'package name on the proto file, or use different '
156
+ 'name for the duplication.')
157
+ if isinstance(desc, descriptor.EnumValueDescriptor):
158
+ error_msg += ('\nNote: enum values appear as '
159
+ 'siblings of the enum type instead of '
160
+ 'children of it.')
161
+
162
+ raise TypeError(error_msg)
163
+
164
+ return
165
+
166
+ def Add(self, file_desc_proto):
167
+ """Adds the FileDescriptorProto and its types to this pool.
168
+
169
+ Args:
170
+ file_desc_proto (FileDescriptorProto): The file descriptor to add.
171
+ """
172
+
173
+ self._internal_db.Add(file_desc_proto)
174
+
175
+ def AddSerializedFile(self, serialized_file_desc_proto):
176
+ """Adds the FileDescriptorProto and its types to this pool.
177
+
178
+ Args:
179
+ serialized_file_desc_proto (bytes): A bytes string, serialization of the
180
+ :class:`FileDescriptorProto` to add.
181
+
182
+ Returns:
183
+ FileDescriptor: Descriptor for the added file.
184
+ """
185
+
186
+ # pylint: disable=g-import-not-at-top
187
+ from google.protobuf import descriptor_pb2
188
+ file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
189
+ serialized_file_desc_proto)
190
+ file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
191
+ file_desc.serialized_pb = serialized_file_desc_proto
192
+ return file_desc
193
+
194
+ # Never call this method. It is for internal usage only.
195
+ def _AddDescriptor(self, desc):
196
+ """Adds a Descriptor to the pool, non-recursively.
197
+
198
+ If the Descriptor contains nested messages or enums, the caller must
199
+ explicitly register them. This method also registers the FileDescriptor
200
+ associated with the message.
201
+
202
+ Args:
203
+ desc: A Descriptor.
204
+ """
205
+ if not isinstance(desc, descriptor.Descriptor):
206
+ raise TypeError('Expected instance of descriptor.Descriptor.')
207
+
208
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
209
+
210
+ self._descriptors[desc.full_name] = desc
211
+ self._AddFileDescriptor(desc.file)
212
+
213
+ # Never call this method. It is for internal usage only.
214
+ def _AddEnumDescriptor(self, enum_desc):
215
+ """Adds an EnumDescriptor to the pool.
216
+
217
+ This method also registers the FileDescriptor associated with the enum.
218
+
219
+ Args:
220
+ enum_desc: An EnumDescriptor.
221
+ """
222
+
223
+ if not isinstance(enum_desc, descriptor.EnumDescriptor):
224
+ raise TypeError('Expected instance of descriptor.EnumDescriptor.')
225
+
226
+ file_name = enum_desc.file.name
227
+ self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
228
+ self._enum_descriptors[enum_desc.full_name] = enum_desc
229
+
230
+ # Top enum values need to be indexed.
231
+ # Count the number of dots to see whether the enum is toplevel or nested
232
+ # in a message. We cannot use enum_desc.containing_type at this stage.
233
+ if enum_desc.file.package:
234
+ top_level = (enum_desc.full_name.count('.')
235
+ - enum_desc.file.package.count('.') == 1)
236
+ else:
237
+ top_level = enum_desc.full_name.count('.') == 0
238
+ if top_level:
239
+ file_name = enum_desc.file.name
240
+ package = enum_desc.file.package
241
+ for enum_value in enum_desc.values:
242
+ full_name = _NormalizeFullyQualifiedName(
243
+ '.'.join((package, enum_value.name)))
244
+ self._CheckConflictRegister(enum_value, full_name, file_name)
245
+ self._top_enum_values[full_name] = enum_value
246
+ self._AddFileDescriptor(enum_desc.file)
247
+
248
+ # Never call this method. It is for internal usage only.
249
+ def _AddServiceDescriptor(self, service_desc):
250
+ """Adds a ServiceDescriptor to the pool.
251
+
252
+ Args:
253
+ service_desc: A ServiceDescriptor.
254
+ """
255
+
256
+ if not isinstance(service_desc, descriptor.ServiceDescriptor):
257
+ raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
258
+
259
+ self._CheckConflictRegister(service_desc, service_desc.full_name,
260
+ service_desc.file.name)
261
+ self._service_descriptors[service_desc.full_name] = service_desc
262
+
263
+ # Never call this method. It is for internal usage only.
264
+ def _AddExtensionDescriptor(self, extension):
265
+ """Adds a FieldDescriptor describing an extension to the pool.
266
+
267
+ Args:
268
+ extension: A FieldDescriptor.
269
+
270
+ Raises:
271
+ AssertionError: when another extension with the same number extends the
272
+ same message.
273
+ TypeError: when the specified extension is not a
274
+ descriptor.FieldDescriptor.
275
+ """
276
+ if not (isinstance(extension, descriptor.FieldDescriptor) and
277
+ extension.is_extension):
278
+ raise TypeError('Expected an extension descriptor.')
279
+
280
+ if extension.extension_scope is None:
281
+ self._CheckConflictRegister(
282
+ extension, extension.full_name, extension.file.name)
283
+ self._toplevel_extensions[extension.full_name] = extension
284
+
285
+ try:
286
+ existing_desc = self._extensions_by_number[
287
+ extension.containing_type][extension.number]
288
+ except KeyError:
289
+ pass
290
+ else:
291
+ if extension is not existing_desc:
292
+ raise AssertionError(
293
+ 'Extensions "%s" and "%s" both try to extend message type "%s" '
294
+ 'with field number %d.' %
295
+ (extension.full_name, existing_desc.full_name,
296
+ extension.containing_type.full_name, extension.number))
297
+
298
+ self._extensions_by_number[extension.containing_type][
299
+ extension.number] = extension
300
+ self._extensions_by_name[extension.containing_type][
301
+ extension.full_name] = extension
302
+
303
+ # Also register MessageSet extensions with the type name.
304
+ if _IsMessageSetExtension(extension):
305
+ self._extensions_by_name[extension.containing_type][
306
+ extension.message_type.full_name] = extension
307
+
308
+ if hasattr(extension.containing_type, '_concrete_class'):
309
+ python_message._AttachFieldHelpers(
310
+ extension.containing_type._concrete_class, extension)
311
+
312
+ # Never call this method. It is for internal usage only.
313
+ def _InternalAddFileDescriptor(self, file_desc):
314
+ """Adds a FileDescriptor to the pool, non-recursively.
315
+
316
+ If the FileDescriptor contains messages or enums, the caller must explicitly
317
+ register them.
318
+
319
+ Args:
320
+ file_desc: A FileDescriptor.
321
+ """
322
+
323
+ self._AddFileDescriptor(file_desc)
324
+
325
+ def _AddFileDescriptor(self, file_desc):
326
+ """Adds a FileDescriptor to the pool, non-recursively.
327
+
328
+ If the FileDescriptor contains messages or enums, the caller must explicitly
329
+ register them.
330
+
331
+ Args:
332
+ file_desc: A FileDescriptor.
333
+ """
334
+
335
+ if not isinstance(file_desc, descriptor.FileDescriptor):
336
+ raise TypeError('Expected instance of descriptor.FileDescriptor.')
337
+ self._file_descriptors[file_desc.name] = file_desc
338
+
339
+ def FindFileByName(self, file_name):
340
+ """Gets a FileDescriptor by file name.
341
+
342
+ Args:
343
+ file_name (str): The path to the file to get a descriptor for.
344
+
345
+ Returns:
346
+ FileDescriptor: The descriptor for the named file.
347
+
348
+ Raises:
349
+ KeyError: if the file cannot be found in the pool.
350
+ """
351
+
352
+ try:
353
+ return self._file_descriptors[file_name]
354
+ except KeyError:
355
+ pass
356
+
357
+ try:
358
+ file_proto = self._internal_db.FindFileByName(file_name)
359
+ except KeyError as error:
360
+ if self._descriptor_db:
361
+ file_proto = self._descriptor_db.FindFileByName(file_name)
362
+ else:
363
+ raise error
364
+ if not file_proto:
365
+ raise KeyError('Cannot find a file named %s' % file_name)
366
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
367
+
368
+ def FindFileContainingSymbol(self, symbol):
369
+ """Gets the FileDescriptor for the file containing the specified symbol.
370
+
371
+ Args:
372
+ symbol (str): The name of the symbol to search for.
373
+
374
+ Returns:
375
+ FileDescriptor: Descriptor for the file that contains the specified
376
+ symbol.
377
+
378
+ Raises:
379
+ KeyError: if the file cannot be found in the pool.
380
+ """
381
+
382
+ symbol = _NormalizeFullyQualifiedName(symbol)
383
+ try:
384
+ return self._InternalFindFileContainingSymbol(symbol)
385
+ except KeyError:
386
+ pass
387
+
388
+ try:
389
+ # Try fallback database. Build and find again if possible.
390
+ self._FindFileContainingSymbolInDb(symbol)
391
+ return self._InternalFindFileContainingSymbol(symbol)
392
+ except KeyError:
393
+ raise KeyError('Cannot find a file containing %s' % symbol)
394
+
395
+ def _InternalFindFileContainingSymbol(self, symbol):
396
+ """Gets the already built FileDescriptor containing the specified symbol.
397
+
398
+ Args:
399
+ symbol (str): The name of the symbol to search for.
400
+
401
+ Returns:
402
+ FileDescriptor: Descriptor for the file that contains the specified
403
+ symbol.
404
+
405
+ Raises:
406
+ KeyError: if the file cannot be found in the pool.
407
+ """
408
+ try:
409
+ return self._descriptors[symbol].file
410
+ except KeyError:
411
+ pass
412
+
413
+ try:
414
+ return self._enum_descriptors[symbol].file
415
+ except KeyError:
416
+ pass
417
+
418
+ try:
419
+ return self._service_descriptors[symbol].file
420
+ except KeyError:
421
+ pass
422
+
423
+ try:
424
+ return self._top_enum_values[symbol].type.file
425
+ except KeyError:
426
+ pass
427
+
428
+ try:
429
+ return self._toplevel_extensions[symbol].file
430
+ except KeyError:
431
+ pass
432
+
433
+ # Try fields, enum values and nested extensions inside a message.
434
+ top_name, _, sub_name = symbol.rpartition('.')
435
+ try:
436
+ message = self.FindMessageTypeByName(top_name)
437
+ assert (sub_name in message.extensions_by_name or
438
+ sub_name in message.fields_by_name or
439
+ sub_name in message.enum_values_by_name)
440
+ return message.file
441
+ except (KeyError, AssertionError):
442
+ raise KeyError('Cannot find a file containing %s' % symbol)
443
+
444
+ def FindMessageTypeByName(self, full_name):
445
+ """Loads the named descriptor from the pool.
446
+
447
+ Args:
448
+ full_name (str): The full name of the descriptor to load.
449
+
450
+ Returns:
451
+ Descriptor: The descriptor for the named type.
452
+
453
+ Raises:
454
+ KeyError: if the message cannot be found in the pool.
455
+ """
456
+
457
+ full_name = _NormalizeFullyQualifiedName(full_name)
458
+ if full_name not in self._descriptors:
459
+ self._FindFileContainingSymbolInDb(full_name)
460
+ return self._descriptors[full_name]
461
+
462
+ def FindEnumTypeByName(self, full_name):
463
+ """Loads the named enum descriptor from the pool.
464
+
465
+ Args:
466
+ full_name (str): The full name of the enum descriptor to load.
467
+
468
+ Returns:
469
+ EnumDescriptor: The enum descriptor for the named type.
470
+
471
+ Raises:
472
+ KeyError: if the enum cannot be found in the pool.
473
+ """
474
+
475
+ full_name = _NormalizeFullyQualifiedName(full_name)
476
+ if full_name not in self._enum_descriptors:
477
+ self._FindFileContainingSymbolInDb(full_name)
478
+ return self._enum_descriptors[full_name]
479
+
480
+ def FindFieldByName(self, full_name):
481
+ """Loads the named field descriptor from the pool.
482
+
483
+ Args:
484
+ full_name (str): The full name of the field descriptor to load.
485
+
486
+ Returns:
487
+ FieldDescriptor: The field descriptor for the named field.
488
+
489
+ Raises:
490
+ KeyError: if the field cannot be found in the pool.
491
+ """
492
+ full_name = _NormalizeFullyQualifiedName(full_name)
493
+ message_name, _, field_name = full_name.rpartition('.')
494
+ message_descriptor = self.FindMessageTypeByName(message_name)
495
+ return message_descriptor.fields_by_name[field_name]
496
+
497
+ def FindOneofByName(self, full_name):
498
+ """Loads the named oneof descriptor from the pool.
499
+
500
+ Args:
501
+ full_name (str): The full name of the oneof descriptor to load.
502
+
503
+ Returns:
504
+ OneofDescriptor: The oneof descriptor for the named oneof.
505
+
506
+ Raises:
507
+ KeyError: if the oneof cannot be found in the pool.
508
+ """
509
+ full_name = _NormalizeFullyQualifiedName(full_name)
510
+ message_name, _, oneof_name = full_name.rpartition('.')
511
+ message_descriptor = self.FindMessageTypeByName(message_name)
512
+ return message_descriptor.oneofs_by_name[oneof_name]
513
+
514
+ def FindExtensionByName(self, full_name):
515
+ """Loads the named extension descriptor from the pool.
516
+
517
+ Args:
518
+ full_name (str): The full name of the extension descriptor to load.
519
+
520
+ Returns:
521
+ FieldDescriptor: The field descriptor for the named extension.
522
+
523
+ Raises:
524
+ KeyError: if the extension cannot be found in the pool.
525
+ """
526
+ full_name = _NormalizeFullyQualifiedName(full_name)
527
+ try:
528
+ # The proto compiler does not give any link between the FileDescriptor
529
+ # and top-level extensions unless the FileDescriptorProto is added to
530
+ # the DescriptorDatabase, but this can impact memory usage.
531
+ # So we registered these extensions by name explicitly.
532
+ return self._toplevel_extensions[full_name]
533
+ except KeyError:
534
+ pass
535
+ message_name, _, extension_name = full_name.rpartition('.')
536
+ try:
537
+ # Most extensions are nested inside a message.
538
+ scope = self.FindMessageTypeByName(message_name)
539
+ except KeyError:
540
+ # Some extensions are defined at file scope.
541
+ scope = self._FindFileContainingSymbolInDb(full_name)
542
+ return scope.extensions_by_name[extension_name]
543
+
544
+ def FindExtensionByNumber(self, message_descriptor, number):
545
+ """Gets the extension of the specified message with the specified number.
546
+
547
+ Extensions have to be registered to this pool by calling :func:`Add` or
548
+ :func:`AddExtensionDescriptor`.
549
+
550
+ Args:
551
+ message_descriptor (Descriptor): descriptor of the extended message.
552
+ number (int): Number of the extension field.
553
+
554
+ Returns:
555
+ FieldDescriptor: The descriptor for the extension.
556
+
557
+ Raises:
558
+ KeyError: when no extension with the given number is known for the
559
+ specified message.
560
+ """
561
+ try:
562
+ return self._extensions_by_number[message_descriptor][number]
563
+ except KeyError:
564
+ self._TryLoadExtensionFromDB(message_descriptor, number)
565
+ return self._extensions_by_number[message_descriptor][number]
566
+
567
+ def FindAllExtensions(self, message_descriptor):
568
+ """Gets all the known extensions of a given message.
569
+
570
+ Extensions have to be registered to this pool by build related
571
+ :func:`Add` or :func:`AddExtensionDescriptor`.
572
+
573
+ Args:
574
+ message_descriptor (Descriptor): Descriptor of the extended message.
575
+
576
+ Returns:
577
+ list[FieldDescriptor]: Field descriptors describing the extensions.
578
+ """
579
+ # Fallback to descriptor db if FindAllExtensionNumbers is provided.
580
+ if self._descriptor_db and hasattr(
581
+ self._descriptor_db, 'FindAllExtensionNumbers'):
582
+ full_name = message_descriptor.full_name
583
+ all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
584
+ for number in all_numbers:
585
+ if number in self._extensions_by_number[message_descriptor]:
586
+ continue
587
+ self._TryLoadExtensionFromDB(message_descriptor, number)
588
+
589
+ return list(self._extensions_by_number[message_descriptor].values())
590
+
591
+ def _TryLoadExtensionFromDB(self, message_descriptor, number):
592
+ """Try to Load extensions from descriptor db.
593
+
594
+ Args:
595
+ message_descriptor: descriptor of the extended message.
596
+ number: the extension number that needs to be loaded.
597
+ """
598
+ if not self._descriptor_db:
599
+ return
600
+ # Only supported when FindFileContainingExtension is provided.
601
+ if not hasattr(
602
+ self._descriptor_db, 'FindFileContainingExtension'):
603
+ return
604
+
605
+ full_name = message_descriptor.full_name
606
+ file_proto = self._descriptor_db.FindFileContainingExtension(
607
+ full_name, number)
608
+
609
+ if file_proto is None:
610
+ return
611
+
612
+ try:
613
+ self._ConvertFileProtoToFileDescriptor(file_proto)
614
+ except:
615
+ warn_msg = ('Unable to load proto file %s for extension number %d.' %
616
+ (file_proto.name, number))
617
+ warnings.warn(warn_msg, RuntimeWarning)
618
+
619
+ def FindServiceByName(self, full_name):
620
+ """Loads the named service descriptor from the pool.
621
+
622
+ Args:
623
+ full_name (str): The full name of the service descriptor to load.
624
+
625
+ Returns:
626
+ ServiceDescriptor: The service descriptor for the named service.
627
+
628
+ Raises:
629
+ KeyError: if the service cannot be found in the pool.
630
+ """
631
+ full_name = _NormalizeFullyQualifiedName(full_name)
632
+ if full_name not in self._service_descriptors:
633
+ self._FindFileContainingSymbolInDb(full_name)
634
+ return self._service_descriptors[full_name]
635
+
636
+ def FindMethodByName(self, full_name):
637
+ """Loads the named service method descriptor from the pool.
638
+
639
+ Args:
640
+ full_name (str): The full name of the method descriptor to load.
641
+
642
+ Returns:
643
+ MethodDescriptor: The method descriptor for the service method.
644
+
645
+ Raises:
646
+ KeyError: if the method cannot be found in the pool.
647
+ """
648
+ full_name = _NormalizeFullyQualifiedName(full_name)
649
+ service_name, _, method_name = full_name.rpartition('.')
650
+ service_descriptor = self.FindServiceByName(service_name)
651
+ return service_descriptor.methods_by_name[method_name]
652
+
653
+ def SetFeatureSetDefaults(self, defaults):
654
+ """Sets the default feature mappings used during the build.
655
+
656
+ Args:
657
+ defaults: a FeatureSetDefaults message containing the new mappings.
658
+ """
659
+ if self._edition_defaults is not None:
660
+ raise ValueError(
661
+ "Feature set defaults can't be changed once the pool has started"
662
+ ' building!'
663
+ )
664
+
665
+ # pylint: disable=g-import-not-at-top
666
+ from google.protobuf import descriptor_pb2
667
+
668
+ if not isinstance(defaults, descriptor_pb2.FeatureSetDefaults):
669
+ raise TypeError('SetFeatureSetDefaults called with invalid type')
670
+
671
+
672
+ if defaults.minimum_edition > defaults.maximum_edition:
673
+ raise ValueError(
674
+ 'Invalid edition range %s to %s'
675
+ % (
676
+ descriptor_pb2.Edition.Name(defaults.minimum_edition),
677
+ descriptor_pb2.Edition.Name(defaults.maximum_edition),
678
+ )
679
+ )
680
+
681
+ prev_edition = descriptor_pb2.Edition.EDITION_UNKNOWN
682
+ for d in defaults.defaults:
683
+ if d.edition == descriptor_pb2.Edition.EDITION_UNKNOWN:
684
+ raise ValueError('Invalid edition EDITION_UNKNOWN specified')
685
+ if prev_edition >= d.edition:
686
+ raise ValueError(
687
+ 'Feature set defaults are not strictly increasing. %s is greater'
688
+ ' than or equal to %s'
689
+ % (
690
+ descriptor_pb2.Edition.Name(prev_edition),
691
+ descriptor_pb2.Edition.Name(d.edition),
692
+ )
693
+ )
694
+ prev_edition = d.edition
695
+ self._edition_defaults = defaults
696
+
697
+ def _CreateDefaultFeatures(self, edition):
698
+ """Creates a FeatureSet message with defaults for a specific edition.
699
+
700
+ Args:
701
+ edition: the edition to generate defaults for.
702
+
703
+ Returns:
704
+ A FeatureSet message with defaults for a specific edition.
705
+ """
706
+ # pylint: disable=g-import-not-at-top
707
+ from google.protobuf import descriptor_pb2
708
+
709
+ with _edition_defaults_lock:
710
+ if not self._edition_defaults:
711
+ self._edition_defaults = descriptor_pb2.FeatureSetDefaults()
712
+ self._edition_defaults.ParseFromString(
713
+ self._serialized_edition_defaults
714
+ )
715
+
716
+ if edition < self._edition_defaults.minimum_edition:
717
+ raise TypeError(
718
+ 'Edition %s is earlier than the minimum supported edition %s!'
719
+ % (
720
+ descriptor_pb2.Edition.Name(edition),
721
+ descriptor_pb2.Edition.Name(
722
+ self._edition_defaults.minimum_edition
723
+ ),
724
+ )
725
+ )
726
+ if edition > self._edition_defaults.maximum_edition:
727
+ raise TypeError(
728
+ 'Edition %s is later than the maximum supported edition %s!'
729
+ % (
730
+ descriptor_pb2.Edition.Name(edition),
731
+ descriptor_pb2.Edition.Name(
732
+ self._edition_defaults.maximum_edition
733
+ ),
734
+ )
735
+ )
736
+ found = None
737
+ for d in self._edition_defaults.defaults:
738
+ if d.edition > edition:
739
+ break
740
+ found = d
741
+ if found is None:
742
+ raise TypeError(
743
+ 'No valid default found for edition %s!'
744
+ % descriptor_pb2.Edition.Name(edition)
745
+ )
746
+
747
+ defaults = descriptor_pb2.FeatureSet()
748
+ defaults.CopyFrom(found.fixed_features)
749
+ defaults.MergeFrom(found.overridable_features)
750
+ return defaults
751
+
752
+ def _InternFeatures(self, features):
753
+ serialized = features.SerializeToString()
754
+ with _edition_defaults_lock:
755
+ cached = self._feature_cache.get(serialized)
756
+ if cached is None:
757
+ self._feature_cache[serialized] = features
758
+ cached = features
759
+ return cached
760
+
761
+ def _FindFileContainingSymbolInDb(self, symbol):
762
+ """Finds the file in descriptor DB containing the specified symbol.
763
+
764
+ Args:
765
+ symbol (str): The name of the symbol to search for.
766
+
767
+ Returns:
768
+ FileDescriptor: The file that contains the specified symbol.
769
+
770
+ Raises:
771
+ KeyError: if the file cannot be found in the descriptor database.
772
+ """
773
+ try:
774
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
775
+ except KeyError as error:
776
+ if self._descriptor_db:
777
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
778
+ else:
779
+ raise error
780
+ if not file_proto:
781
+ raise KeyError('Cannot find a file containing %s' % symbol)
782
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
783
+
784
+ def _ConvertFileProtoToFileDescriptor(self, file_proto):
785
+ """Creates a FileDescriptor from a proto or returns a cached copy.
786
+
787
+ This method also has the side effect of loading all the symbols found in
788
+ the file into the appropriate dictionaries in the pool.
789
+
790
+ Args:
791
+ file_proto: The proto to convert.
792
+
793
+ Returns:
794
+ A FileDescriptor matching the passed in proto.
795
+ """
796
+ if file_proto.name not in self._file_descriptors:
797
+ built_deps = list(self._GetDeps(file_proto.dependency))
798
+ direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
799
+ public_deps = [direct_deps[i] for i in file_proto.public_dependency]
800
+
801
+ # pylint: disable=g-import-not-at-top
802
+ from google.protobuf import descriptor_pb2
803
+
804
+ file_descriptor = descriptor.FileDescriptor(
805
+ pool=self,
806
+ name=file_proto.name,
807
+ package=file_proto.package,
808
+ syntax=file_proto.syntax,
809
+ edition=descriptor_pb2.Edition.Name(file_proto.edition),
810
+ options=_OptionsOrNone(file_proto),
811
+ serialized_pb=file_proto.SerializeToString(),
812
+ dependencies=direct_deps,
813
+ public_dependencies=public_deps,
814
+ # pylint: disable=protected-access
815
+ create_key=descriptor._internal_create_key,
816
+ )
817
+ scope = {}
818
+
819
+ # This loop extracts all the message and enum types from all the
820
+ # dependencies of the file_proto. This is necessary to create the
821
+ # scope of available message types when defining the passed in
822
+ # file proto.
823
+ for dependency in built_deps:
824
+ scope.update(self._ExtractSymbols(
825
+ dependency.message_types_by_name.values()))
826
+ scope.update((_PrefixWithDot(enum.full_name), enum)
827
+ for enum in dependency.enum_types_by_name.values())
828
+
829
+ for message_type in file_proto.message_type:
830
+ message_desc = self._ConvertMessageDescriptor(
831
+ message_type, file_proto.package, file_descriptor, scope,
832
+ file_proto.syntax)
833
+ file_descriptor.message_types_by_name[message_desc.name] = (
834
+ message_desc)
835
+
836
+ for enum_type in file_proto.enum_type:
837
+ file_descriptor.enum_types_by_name[enum_type.name] = (
838
+ self._ConvertEnumDescriptor(enum_type, file_proto.package,
839
+ file_descriptor, None, scope, True))
840
+
841
+ for index, extension_proto in enumerate(file_proto.extension):
842
+ extension_desc = self._MakeFieldDescriptor(
843
+ extension_proto, file_proto.package, index, file_descriptor,
844
+ is_extension=True)
845
+ extension_desc.containing_type = self._GetTypeFromScope(
846
+ file_descriptor.package, extension_proto.extendee, scope)
847
+ self._SetFieldType(extension_proto, extension_desc,
848
+ file_descriptor.package, scope)
849
+ file_descriptor.extensions_by_name[extension_desc.name] = (
850
+ extension_desc)
851
+
852
+ for desc_proto in file_proto.message_type:
853
+ self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
854
+
855
+ if file_proto.package:
856
+ desc_proto_prefix = _PrefixWithDot(file_proto.package)
857
+ else:
858
+ desc_proto_prefix = ''
859
+
860
+ for desc_proto in file_proto.message_type:
861
+ desc = self._GetTypeFromScope(
862
+ desc_proto_prefix, desc_proto.name, scope)
863
+ file_descriptor.message_types_by_name[desc_proto.name] = desc
864
+
865
+ for index, service_proto in enumerate(file_proto.service):
866
+ file_descriptor.services_by_name[service_proto.name] = (
867
+ self._MakeServiceDescriptor(service_proto, index, scope,
868
+ file_proto.package, file_descriptor))
869
+
870
+ self._file_descriptors[file_proto.name] = file_descriptor
871
+
872
+ # Add extensions to the pool
873
+ def AddExtensionForNested(message_type):
874
+ for nested in message_type.nested_types:
875
+ AddExtensionForNested(nested)
876
+ for extension in message_type.extensions:
877
+ self._AddExtensionDescriptor(extension)
878
+
879
+ file_desc = self._file_descriptors[file_proto.name]
880
+ for extension in file_desc.extensions_by_name.values():
881
+ self._AddExtensionDescriptor(extension)
882
+ for message_type in file_desc.message_types_by_name.values():
883
+ AddExtensionForNested(message_type)
884
+
885
+ return file_desc
886
+
887
+ def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
888
+ scope=None, syntax=None):
889
+ """Adds the proto to the pool in the specified package.
890
+
891
+ Args:
892
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
893
+ package: The package the proto should be located in.
894
+ file_desc: The file containing this message.
895
+ scope: Dict mapping short and full symbols to message and enum types.
896
+ syntax: string indicating syntax of the file ("proto2" or "proto3")
897
+
898
+ Returns:
899
+ The added descriptor.
900
+ """
901
+
902
+ if package:
903
+ desc_name = '.'.join((package, desc_proto.name))
904
+ else:
905
+ desc_name = desc_proto.name
906
+
907
+ if file_desc is None:
908
+ file_name = None
909
+ else:
910
+ file_name = file_desc.name
911
+
912
+ if scope is None:
913
+ scope = {}
914
+
915
+ nested = [
916
+ self._ConvertMessageDescriptor(
917
+ nested, desc_name, file_desc, scope, syntax)
918
+ for nested in desc_proto.nested_type]
919
+ enums = [
920
+ self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
921
+ scope, False)
922
+ for enum in desc_proto.enum_type]
923
+ fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
924
+ for index, field in enumerate(desc_proto.field)]
925
+ extensions = [
926
+ self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
927
+ is_extension=True)
928
+ for index, extension in enumerate(desc_proto.extension)]
929
+ oneofs = [
930
+ # pylint: disable=g-complex-comprehension
931
+ descriptor.OneofDescriptor(
932
+ desc.name,
933
+ '.'.join((desc_name, desc.name)),
934
+ index,
935
+ None,
936
+ [],
937
+ _OptionsOrNone(desc),
938
+ # pylint: disable=protected-access
939
+ create_key=descriptor._internal_create_key)
940
+ for index, desc in enumerate(desc_proto.oneof_decl)
941
+ ]
942
+ extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
943
+ if extension_ranges:
944
+ is_extendable = True
945
+ else:
946
+ is_extendable = False
947
+ desc = descriptor.Descriptor(
948
+ name=desc_proto.name,
949
+ full_name=desc_name,
950
+ filename=file_name,
951
+ containing_type=None,
952
+ fields=fields,
953
+ oneofs=oneofs,
954
+ nested_types=nested,
955
+ enum_types=enums,
956
+ extensions=extensions,
957
+ options=_OptionsOrNone(desc_proto),
958
+ is_extendable=is_extendable,
959
+ extension_ranges=extension_ranges,
960
+ file=file_desc,
961
+ serialized_start=None,
962
+ serialized_end=None,
963
+ is_map_entry=desc_proto.options.map_entry,
964
+ # pylint: disable=protected-access
965
+ create_key=descriptor._internal_create_key,
966
+ )
967
+ for nested in desc.nested_types:
968
+ nested.containing_type = desc
969
+ for enum in desc.enum_types:
970
+ enum.containing_type = desc
971
+ for field_index, field_desc in enumerate(desc_proto.field):
972
+ if field_desc.HasField('oneof_index'):
973
+ oneof_index = field_desc.oneof_index
974
+ oneofs[oneof_index].fields.append(fields[field_index])
975
+ fields[field_index].containing_oneof = oneofs[oneof_index]
976
+
977
+ scope[_PrefixWithDot(desc_name)] = desc
978
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
979
+ self._descriptors[desc_name] = desc
980
+ return desc
981
+
982
+ def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
983
+ containing_type=None, scope=None, top_level=False):
984
+ """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
985
+
986
+ Args:
987
+ enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
988
+ package: Optional package name for the new message EnumDescriptor.
989
+ file_desc: The file containing the enum descriptor.
990
+ containing_type: The type containing this enum.
991
+ scope: Scope containing available types.
992
+ top_level: If True, the enum is a top level symbol. If False, the enum
993
+ is defined inside a message.
994
+
995
+ Returns:
996
+ The added descriptor
997
+ """
998
+
999
+ if package:
1000
+ enum_name = '.'.join((package, enum_proto.name))
1001
+ else:
1002
+ enum_name = enum_proto.name
1003
+
1004
+ if file_desc is None:
1005
+ file_name = None
1006
+ else:
1007
+ file_name = file_desc.name
1008
+
1009
+ values = [self._MakeEnumValueDescriptor(value, index)
1010
+ for index, value in enumerate(enum_proto.value)]
1011
+ desc = descriptor.EnumDescriptor(name=enum_proto.name,
1012
+ full_name=enum_name,
1013
+ filename=file_name,
1014
+ file=file_desc,
1015
+ values=values,
1016
+ containing_type=containing_type,
1017
+ options=_OptionsOrNone(enum_proto),
1018
+ # pylint: disable=protected-access
1019
+ create_key=descriptor._internal_create_key)
1020
+ scope['.%s' % enum_name] = desc
1021
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
1022
+ self._enum_descriptors[enum_name] = desc
1023
+
1024
+ # Add top level enum values.
1025
+ if top_level:
1026
+ for value in values:
1027
+ full_name = _NormalizeFullyQualifiedName(
1028
+ '.'.join((package, value.name)))
1029
+ self._CheckConflictRegister(value, full_name, file_name)
1030
+ self._top_enum_values[full_name] = value
1031
+
1032
+ return desc
1033
+
1034
+ def _MakeFieldDescriptor(self, field_proto, message_name, index,
1035
+ file_desc, is_extension=False):
1036
+ """Creates a field descriptor from a FieldDescriptorProto.
1037
+
1038
+ For message and enum type fields, this method will do a look up
1039
+ in the pool for the appropriate descriptor for that type. If it
1040
+ is unavailable, it will fall back to the _source function to
1041
+ create it. If this type is still unavailable, construction will
1042
+ fail.
1043
+
1044
+ Args:
1045
+ field_proto: The proto describing the field.
1046
+ message_name: The name of the containing message.
1047
+ index: Index of the field
1048
+ file_desc: The file containing the field descriptor.
1049
+ is_extension: Indication that this field is for an extension.
1050
+
1051
+ Returns:
1052
+ An initialized FieldDescriptor object
1053
+ """
1054
+
1055
+ if message_name:
1056
+ full_name = '.'.join((message_name, field_proto.name))
1057
+ else:
1058
+ full_name = field_proto.name
1059
+
1060
+ if field_proto.json_name:
1061
+ json_name = field_proto.json_name
1062
+ else:
1063
+ json_name = None
1064
+
1065
+ return descriptor.FieldDescriptor(
1066
+ name=field_proto.name,
1067
+ full_name=full_name,
1068
+ index=index,
1069
+ number=field_proto.number,
1070
+ type=field_proto.type,
1071
+ cpp_type=None,
1072
+ message_type=None,
1073
+ enum_type=None,
1074
+ containing_type=None,
1075
+ label=field_proto.label,
1076
+ has_default_value=False,
1077
+ default_value=None,
1078
+ is_extension=is_extension,
1079
+ extension_scope=None,
1080
+ options=_OptionsOrNone(field_proto),
1081
+ json_name=json_name,
1082
+ file=file_desc,
1083
+ # pylint: disable=protected-access
1084
+ create_key=descriptor._internal_create_key)
1085
+
1086
+ def _SetAllFieldTypes(self, package, desc_proto, scope):
1087
+ """Sets all the descriptor's fields's types.
1088
+
1089
+ This method also sets the containing types on any extensions.
1090
+
1091
+ Args:
1092
+ package: The current package of desc_proto.
1093
+ desc_proto: The message descriptor to update.
1094
+ scope: Enclosing scope of available types.
1095
+ """
1096
+
1097
+ package = _PrefixWithDot(package)
1098
+
1099
+ main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
1100
+
1101
+ if package == '.':
1102
+ nested_package = _PrefixWithDot(desc_proto.name)
1103
+ else:
1104
+ nested_package = '.'.join([package, desc_proto.name])
1105
+
1106
+ for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
1107
+ self._SetFieldType(field_proto, field_desc, nested_package, scope)
1108
+
1109
+ for extension_proto, extension_desc in (
1110
+ zip(desc_proto.extension, main_desc.extensions)):
1111
+ extension_desc.containing_type = self._GetTypeFromScope(
1112
+ nested_package, extension_proto.extendee, scope)
1113
+ self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
1114
+
1115
+ for nested_type in desc_proto.nested_type:
1116
+ self._SetAllFieldTypes(nested_package, nested_type, scope)
1117
+
1118
+ def _SetFieldType(self, field_proto, field_desc, package, scope):
1119
+ """Sets the field's type, cpp_type, message_type and enum_type.
1120
+
1121
+ Args:
1122
+ field_proto: Data about the field in proto format.
1123
+ field_desc: The descriptor to modify.
1124
+ package: The package the field's container is in.
1125
+ scope: Enclosing scope of available types.
1126
+ """
1127
+ if field_proto.type_name:
1128
+ desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
1129
+ else:
1130
+ desc = None
1131
+
1132
+ if not field_proto.HasField('type'):
1133
+ if isinstance(desc, descriptor.Descriptor):
1134
+ field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
1135
+ else:
1136
+ field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
1137
+
1138
+ field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
1139
+ field_proto.type)
1140
+
1141
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
1142
+ or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
1143
+ field_desc.message_type = desc
1144
+
1145
+ if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1146
+ field_desc.enum_type = desc
1147
+
1148
+ if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
1149
+ field_desc.has_default_value = False
1150
+ field_desc.default_value = []
1151
+ elif field_proto.HasField('default_value'):
1152
+ field_desc.has_default_value = True
1153
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1154
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1155
+ field_desc.default_value = float(field_proto.default_value)
1156
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1157
+ field_desc.default_value = field_proto.default_value
1158
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1159
+ field_desc.default_value = field_proto.default_value.lower() == 'true'
1160
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1161
+ field_desc.default_value = field_desc.enum_type.values_by_name[
1162
+ field_proto.default_value].number
1163
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1164
+ field_desc.default_value = text_encoding.CUnescape(
1165
+ field_proto.default_value)
1166
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1167
+ field_desc.default_value = None
1168
+ else:
1169
+ # All other types are of the "int" type.
1170
+ field_desc.default_value = int(field_proto.default_value)
1171
+ else:
1172
+ field_desc.has_default_value = False
1173
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1174
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1175
+ field_desc.default_value = 0.0
1176
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1177
+ field_desc.default_value = u''
1178
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1179
+ field_desc.default_value = False
1180
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1181
+ field_desc.default_value = field_desc.enum_type.values[0].number
1182
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1183
+ field_desc.default_value = b''
1184
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1185
+ field_desc.default_value = None
1186
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
1187
+ field_desc.default_value = None
1188
+ else:
1189
+ # All other types are of the "int" type.
1190
+ field_desc.default_value = 0
1191
+
1192
+ field_desc.type = field_proto.type
1193
+
1194
+ def _MakeEnumValueDescriptor(self, value_proto, index):
1195
+ """Creates a enum value descriptor object from a enum value proto.
1196
+
1197
+ Args:
1198
+ value_proto: The proto describing the enum value.
1199
+ index: The index of the enum value.
1200
+
1201
+ Returns:
1202
+ An initialized EnumValueDescriptor object.
1203
+ """
1204
+
1205
+ return descriptor.EnumValueDescriptor(
1206
+ name=value_proto.name,
1207
+ index=index,
1208
+ number=value_proto.number,
1209
+ options=_OptionsOrNone(value_proto),
1210
+ type=None,
1211
+ # pylint: disable=protected-access
1212
+ create_key=descriptor._internal_create_key)
1213
+
1214
+ def _MakeServiceDescriptor(self, service_proto, service_index, scope,
1215
+ package, file_desc):
1216
+ """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
1217
+
1218
+ Args:
1219
+ service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
1220
+ service_index: The index of the service in the File.
1221
+ scope: Dict mapping short and full symbols to message and enum types.
1222
+ package: Optional package name for the new message EnumDescriptor.
1223
+ file_desc: The file containing the service descriptor.
1224
+
1225
+ Returns:
1226
+ The added descriptor.
1227
+ """
1228
+
1229
+ if package:
1230
+ service_name = '.'.join((package, service_proto.name))
1231
+ else:
1232
+ service_name = service_proto.name
1233
+
1234
+ methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
1235
+ scope, index)
1236
+ for index, method_proto in enumerate(service_proto.method)]
1237
+ desc = descriptor.ServiceDescriptor(
1238
+ name=service_proto.name,
1239
+ full_name=service_name,
1240
+ index=service_index,
1241
+ methods=methods,
1242
+ options=_OptionsOrNone(service_proto),
1243
+ file=file_desc,
1244
+ # pylint: disable=protected-access
1245
+ create_key=descriptor._internal_create_key)
1246
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
1247
+ self._service_descriptors[service_name] = desc
1248
+ return desc
1249
+
1250
  def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
                            index):
    """Creates a method descriptor from a MethodDescriptorProto.

    Args:
      method_proto: The proto describing the method.
      service_name: The name of the containing service.
      package: Optional package name to look up for types.
      scope: Scope containing available types.
      index: Index of the method in the service.

    Returns:
      An initialized MethodDescriptor object (containing_service is left None
      here; it is wired up by the ServiceDescriptor constructor).
    """
    full_name = '.'.join((service_name, method_proto.name))
    # Request/response types are resolved relative to the current package.
    input_type = self._GetTypeFromScope(
        package, method_proto.input_type, scope)
    output_type = self._GetTypeFromScope(
        package, method_proto.output_type, scope)
    return descriptor.MethodDescriptor(
        name=method_proto.name,
        full_name=full_name,
        index=index,
        containing_service=None,
        input_type=input_type,
        output_type=output_type,
        client_streaming=method_proto.client_streaming,
        server_streaming=method_proto.server_streaming,
        options=_OptionsOrNone(method_proto),
        # pylint: disable=protected-access
        create_key=descriptor._internal_create_key)
1281
+
1282
+ def _ExtractSymbols(self, descriptors):
1283
+ """Pulls out all the symbols from descriptor protos.
1284
+
1285
+ Args:
1286
+ descriptors: The messages to extract descriptors from.
1287
+ Yields:
1288
+ A two element tuple of the type name and descriptor object.
1289
+ """
1290
+
1291
+ for desc in descriptors:
1292
+ yield (_PrefixWithDot(desc.full_name), desc)
1293
+ for symbol in self._ExtractSymbols(desc.nested_types):
1294
+ yield symbol
1295
+ for enum in desc.enum_types:
1296
+ yield (_PrefixWithDot(enum.full_name), enum)
1297
+
1298
  def _GetDeps(self, dependencies, visited=None):
    """Recursively finds dependencies for file protos.

    Args:
      dependencies: The names of the files being depended on.
      visited: The names of files already found; shared across the recursion
        so that each file is yielded at most once.

    Yields:
      Each direct and indirect dependency (as FileDescriptor objects).
    """

    visited = visited or set()
    for dependency in dependencies:
      if dependency not in visited:
        visited.add(dependency)
        dep_desc = self.FindFileByName(dependency)
        yield dep_desc
        # Only *public* dependencies are followed transitively.
        public_files = [d.name for d in dep_desc.public_dependencies]
        yield from self._GetDeps(public_files, visited)
1317
+
1318
+ def _GetTypeFromScope(self, package, type_name, scope):
1319
+ """Finds a given type name in the current scope.
1320
+
1321
+ Args:
1322
+ package: The package the proto should be located in.
1323
+ type_name: The name of the type to be found in the scope.
1324
+ scope: Dict mapping short and full symbols to message and enum types.
1325
+
1326
+ Returns:
1327
+ The descriptor for the requested type.
1328
+ """
1329
+ if type_name not in scope:
1330
+ components = _PrefixWithDot(package).split('.')
1331
+ while components:
1332
+ possible_match = '.'.join(components + [type_name])
1333
+ if possible_match in scope:
1334
+ type_name = possible_match
1335
+ break
1336
+ else:
1337
+ components.pop(-1)
1338
+ return scope[type_name]
1339
+
1340
+
1341
+ def _PrefixWithDot(name):
1342
+ return name if name.startswith('.') else '.%s' % name
1343
+
1344
+
1345
if _USE_C_DESCRIPTORS:
  # TODO: This pool could be constructed from Python code, when we
  # support a flag like 'use_cpp_generated_pool=True'.
  # pylint: disable=protected-access
  _DEFAULT = descriptor._message.default_pool
else:
  # Process-wide default pool shared by all generated _pb2 modules.
  _DEFAULT = DescriptorPool()


def Default():
  """Returns the process-wide default DescriptorPool singleton."""
  return _DEFAULT
parrot/lib/python3.10/site-packages/google/protobuf/duration_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/duration.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fails fast if this gencode is loaded against an incompatible runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    5,
    28,
    2,
    '',
    'google/protobuf/duration.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Registers the serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\":\n\x08\x44uration\x12\x18\n\x07seconds\x18\x01 \x01(\x03R\x07seconds\x12\x14\n\x05nanos\x18\x02 \x01(\x05R\x05nanosB\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python runtime: attach serialized options and the byte offsets of
  # each descriptor within the serialized file.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_DURATION']._serialized_start=51
  _globals['_DURATION']._serialized_end=109
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/empty_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/empty.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fails fast if this gencode is loaded against an incompatible runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    5,
    28,
    2,
    '',
    'google/protobuf/empty.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Registers the serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python runtime: attach serialized options and the byte offsets of
  # each descriptor within the serialized file.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_EMPTY']._serialized_start=48
  _globals['_EMPTY']._serialized_end=55
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/field_mask_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/field_mask.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fails fast if this gencode is loaded against an incompatible runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
    _runtime_version.Domain.PUBLIC,
    5,
    28,
    2,
    '',
    'google/protobuf/field_mask.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Registers the serialized FileDescriptorProto with the default pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"!\n\tFieldMask\x12\x14\n\x05paths\x18\x01 \x03(\tR\x05pathsB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python runtime: attach serialized options and the byte offsets of
  # each descriptor within the serialized file.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_FIELDMASK']._serialized_start=53
  _globals['_FIELDMASK']._serialized_end=86
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/internal/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-310.pyc ADDED
Binary file (2.5 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-310.pyc ADDED
Binary file (22.7 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-310.pyc ADDED
Binary file (5.61 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/internal/_parameterized.py ADDED
@@ -0,0 +1,420 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #! /usr/bin/env python
2
+ #
3
+ # Protocol Buffers - Google's data interchange format
4
+ # Copyright 2008 Google Inc. All rights reserved.
5
+ #
6
+ # Use of this source code is governed by a BSD-style
7
+ # license that can be found in the LICENSE file or at
8
+ # https://developers.google.com/open-source/licenses/bsd
9
+
10
+ """Adds support for parameterized tests to Python's unittest TestCase class.
11
+
12
+ A parameterized test is a method in a test case that is invoked with different
13
+ argument tuples.
14
+
15
+ A simple example:
16
+
17
+ class AdditionExample(_parameterized.TestCase):
18
+ @_parameterized.parameters(
19
+ (1, 2, 3),
20
+ (4, 5, 9),
21
+ (1, 1, 3))
22
+ def testAddition(self, op1, op2, result):
23
+ self.assertEqual(result, op1 + op2)
24
+
25
+
26
+ Each invocation is a separate test case and properly isolated just
27
+ like a normal test method, with its own setUp/tearDown cycle. In the
28
+ example above, there are three separate testcases, one of which will
29
+ fail due to an assertion error (1 + 1 != 3).
30
+
31
+ Parameters for individual test cases can be tuples (with positional parameters)
32
+ or dictionaries (with named parameters):
33
+
34
+ class AdditionExample(_parameterized.TestCase):
35
+ @_parameterized.parameters(
36
+ {'op1': 1, 'op2': 2, 'result': 3},
37
+ {'op1': 4, 'op2': 5, 'result': 9},
38
+ )
39
+ def testAddition(self, op1, op2, result):
40
+ self.assertEqual(result, op1 + op2)
41
+
42
+ If a parameterized test fails, the error message will show the
43
+ original test name (which is modified internally) and the arguments
44
+ for the specific invocation, which are part of the string returned by
45
+ the shortDescription() method on test cases.
46
+
47
+ The id method of the test, used internally by the unittest framework,
48
+ is also modified to show the arguments. To make sure that test names
49
+ stay the same across several invocations, object representations like
50
+
51
+ >>> class Foo(object):
52
+ ... pass
53
+ >>> repr(Foo())
54
+ '<__main__.Foo object at 0x23d8610>'
55
+
56
+ are turned into '<__main__.Foo>'. For even more descriptive names,
57
+ especially in test logs, you can use the named_parameters decorator. In
58
+ this case, only tuples are supported, and the first parameters has to
59
+ be a string (or an object that returns an apt name when converted via
60
+ str()):
61
+
62
+ class NamedExample(_parameterized.TestCase):
63
+ @_parameterized.named_parameters(
64
+ ('Normal', 'aa', 'aaa', True),
65
+ ('EmptyPrefix', '', 'abc', True),
66
+ ('BothEmpty', '', '', True))
67
+ def testStartsWith(self, prefix, string, result):
68
+ self.assertEqual(result, strings.startswith(prefix))
69
+
70
+ Named tests also have the benefit that they can be run individually
71
+ from the command line:
72
+
73
+ $ testmodule.py NamedExample.testStartsWithNormal
74
+ .
75
+ --------------------------------------------------------------------
76
+ Ran 1 test in 0.000s
77
+
78
+ OK
79
+
80
+ Parameterized Classes
81
+ =====================
82
+ If invocation arguments are shared across test methods in a single
83
+ TestCase class, instead of decorating all test methods
84
+ individually, the class itself can be decorated:
85
+
86
+ @_parameterized.parameters(
87
+ (1, 2, 3)
88
+ (4, 5, 9))
89
+ class ArithmeticTest(_parameterized.TestCase):
90
+ def testAdd(self, arg1, arg2, result):
91
+ self.assertEqual(arg1 + arg2, result)
92
+
93
+ def testSubtract(self, arg2, arg2, result):
94
+ self.assertEqual(result - arg1, arg2)
95
+
96
+ Inputs from Iterables
97
+ =====================
98
+ If parameters should be shared across several test cases, or are dynamically
99
+ created from other sources, a single non-tuple iterable can be passed into
100
+ the decorator. This iterable will be used to obtain the test cases:
101
+
102
+ class AdditionExample(_parameterized.TestCase):
103
+ @_parameterized.parameters(
104
+ c.op1, c.op2, c.result for c in testcases
105
+ )
106
+ def testAddition(self, op1, op2, result):
107
+ self.assertEqual(result, op1 + op2)
108
+
109
+
110
+ Single-Argument Test Methods
111
+ ============================
112
+ If a test method takes only one argument, the single argument does not need to
113
+ be wrapped into a tuple:
114
+
115
+ class NegativeNumberExample(_parameterized.TestCase):
116
+ @_parameterized.parameters(
117
+ -1, -3, -4, -5
118
+ )
119
+ def testIsNegative(self, arg):
120
+ self.assertTrue(IsNegative(arg))
121
+ """
122
+
123
+ __author__ = 'tmarek@google.com (Torsten Marek)'
124
+
125
+ import functools
126
+ import re
127
+ import types
128
+ import unittest
129
+ import uuid
130
+
131
+ try:
132
+ # Since python 3
133
+ import collections.abc as collections_abc
134
+ except ImportError:
135
+ # Won't work after python 3.8
136
+ import collections as collections_abc
137
+
138
# Matches the default object repr so the volatile memory address can be
# stripped when building stable test names.
ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
# Per-process random separator between a base test name and its case index.
_SEPARATOR = uuid.uuid1().hex
# Sentinels selecting the naming strategy (named_parameters vs parameters).
_FIRST_ARG = object()
_ARGUMENT_REPR = object()
142
+
143
+
144
def _CleanRepr(obj):
  """repr(obj) with the volatile 'at 0x...' address removed, for stable IDs."""
  return ADDR_RE.sub(r'<\1>', repr(obj))
146
+
147
+
148
+ # Helper function formerly from the unittest module, removed from it in
149
+ # Python 2.7.
150
+ def _StrClass(cls):
151
+ return '%s.%s' % (cls.__module__, cls.__name__)
152
+
153
+
154
+ def _NonStringIterable(obj):
155
+ return (isinstance(obj, collections_abc.Iterable) and
156
+ not isinstance(obj, str))
157
+
158
+
159
def _FormatParameterList(testcase_params):
  """Render testcase parameters as 'a, b' or 'name=value, ...' for test IDs."""
  if isinstance(testcase_params, collections_abc.Mapping):
    rendered = ['%s=%s' % (arg_name, _CleanRepr(arg_value))
                for arg_name, arg_value in testcase_params.items()]
    return ', '.join(rendered)
  if _NonStringIterable(testcase_params):
    return ', '.join(_CleanRepr(item) for item in testcase_params)
  # A single bare value: wrap it and format as a one-element tuple.
  return _FormatParameterList((testcase_params,))
167
+
168
+
169
class _ParameterizedTestIter(object):
  """Callable and iterable class for producing new test cases."""

  def __init__(self, test_method, testcases, naming_type):
    """Returns concrete test functions for a test and a list of parameters.

    The naming_type is used to determine the name of the concrete
    functions as reported by the unittest framework. If naming_type is
    _FIRST_ARG, the testcases must be tuples, and the first element must
    have a string representation that is a valid Python identifier.

    Args:
      test_method: The decorated test method.
      testcases: (list of tuple/dict) A list of parameter
                 tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
    """
    self._test_method = test_method
    self.testcases = testcases
    self._naming_type = naming_type

  def __call__(self, *args, **kwargs):
    # Reached only when the enclosing class was NOT built by
    # TestGeneratorMetaclass, so the iterable was never expanded into tests.
    raise RuntimeError('You appear to be running a parameterized test case '
                       'without having inherited from parameterized.'
                       'TestCase. This is bad because none of '
                       'your test cases are actually being run.')

  def __iter__(self):
    test_method = self._test_method
    naming_type = self._naming_type

    def MakeBoundParamTest(testcase_params):
      # Each generated test closes over one parameter tuple/dict/value and
      # dispatches to the original method with the matching calling style.
      @functools.wraps(test_method)
      def BoundParamTest(self):
        if isinstance(testcase_params, collections_abc.Mapping):
          test_method(self, **testcase_params)
        elif _NonStringIterable(testcase_params):
          test_method(self, *testcase_params)
        else:
          test_method(self, testcase_params)

      if naming_type is _FIRST_ARG:
        # Signal the metaclass that the name of the test function is unique
        # and descriptive.
        BoundParamTest.__x_use_name__ = True
        BoundParamTest.__name__ += str(testcase_params[0])
        testcase_params = testcase_params[1:]
      elif naming_type is _ARGUMENT_REPR:
        # __x_extra_id__ is used to pass naming information to the __new__
        # method of TestGeneratorMetaclass.
        # The metaclass will make sure to create a unique, but nondescriptive
        # name for this test.
        BoundParamTest.__x_extra_id__ = '(%s)' % (
            _FormatParameterList(testcase_params),)
      else:
        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))

      BoundParamTest.__doc__ = '%s(%s)' % (
          BoundParamTest.__name__, _FormatParameterList(testcase_params))
      if test_method.__doc__:
        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
      return BoundParamTest
    return (MakeBoundParamTest(c) for c in self.testcases)
232
+
233
+
234
+ def _IsSingletonList(testcases):
235
+ """True iff testcases contains only a single non-tuple element."""
236
+ return len(testcases) == 1 and not isinstance(testcases[0], tuple)
237
+
238
+
239
def _ModifyClass(class_object, testcases, naming_type):
  """Replaces each test method on class_object with its parameterized cases."""
  # Refuse to parameterize a class that already carries generated tests.
  assert not getattr(class_object, '_id_suffix', None), (
      'Cannot add parameters to %s,'
      ' which already has parameterized methods.' % (class_object,))
  class_object._id_suffix = id_suffix = {}
  # We change the size of __dict__ while we iterate over it,
  # which Python 3.x will complain about, so use copy().
  for name, obj in class_object.__dict__.copy().items():
    if (name.startswith(unittest.TestLoader.testMethodPrefix)
        and isinstance(obj, types.FunctionType)):
      delattr(class_object, name)
      methods = {}
      _UpdateClassDictForParamTestCase(
          methods, id_suffix, name,
          _ParameterizedTestIter(obj, testcases, naming_type))
      for name, meth in methods.items():
        setattr(class_object, name, meth)
256
+
257
+
258
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type (_FIRST_ARG or _ARGUMENT_REPR).
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  # A single non-tuple argument is itself the iterable of testcases.
  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  def _Apply(obj):
    if not isinstance(obj, type):
      # Decorating a single test method: defer expansion to the iterator.
      return _ParameterizedTestIter(obj, testcases, naming_type)
    # Decorating a whole class: materialize non-sequence iterables so every
    # test method can consume the same case list, then rewrite in place.
    if isinstance(testcases, collections_abc.Sequence):
      cases = testcases
    else:
      cases = list(testcases)
    _ModifyClass(obj, cases, naming_type)
    return obj

  return _Apply
285
+
286
+
287
def parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating parameterized tests.

  Generated tests get unique but nondescriptive names; the argument repr is
  attached to the test id instead.

  See the module docstring for a usage example.
  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples/dicts/objects (for tests
                with only one argument).

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_ARGUMENT_REPR, testcases)
300
+
301
+
302
def named_parameters(*testcases):  # pylint: disable=invalid-name
  """A decorator for creating parameterized tests.

  See the module docstring for a usage example. The first element of
  each parameter tuple should be a string and will be appended to the
  name of the test method, so the cases can be run individually by name.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples.

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  return _ParameterDecorator(_FIRST_ARG, testcases)
317
+
318
+
319
class TestGeneratorMetaclass(type):
  """Metaclass for test cases with test generators.

  A test generator is an iterable in a testcase that produces callables. These
  callables must be single-argument methods. These methods are injected into
  the class namespace and the original iterable is removed. If the name of the
  iterable conforms to the test pattern, the injected methods will be picked
  up as tests by the unittest framework.

  In general, it is supposed to be used in conjunction with the
  parameters decorator.
  """

  def __new__(mcs, class_name, bases, dct):
    # Maps each generated test name to its descriptive id suffix.
    dct['_id_suffix'] = id_suffix = {}
    # Iterate a copy: _UpdateClassDictForParamTestCase mutates dct.
    for name, obj in dct.copy().items():
      if (name.startswith(unittest.TestLoader.testMethodPrefix) and
          _NonStringIterable(obj)):
        iterator = iter(obj)
        dct.pop(name)
        _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)

    return type.__new__(mcs, class_name, bases, dct)
342
+
343
+
344
+ def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
345
+ """Adds individual test cases to a dictionary.
346
+
347
+ Args:
348
+ dct: The target dictionary.
349
+ id_suffix: The dictionary for mapping names to test IDs.
350
+ name: The original name of the test case.
351
+ iterator: The iterator generating the individual test cases.
352
+ """
353
+ for idx, func in enumerate(iterator):
354
+ assert callable(func), 'Test generators must yield callables, got %r' % (
355
+ func,)
356
+ if getattr(func, '__x_use_name__', False):
357
+ new_name = func.__name__
358
+ else:
359
+ new_name = '%s%s%d' % (name, _SEPARATOR, idx)
360
+ assert new_name not in dct, (
361
+ 'Name of parameterized test case "%s" not unique' % (new_name,))
362
+ dct[new_name] = func
363
+ id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
364
+
365
+
366
class TestCase(unittest.TestCase, metaclass=TestGeneratorMetaclass):
  """Base class for test cases using the parameters decorator."""

  def _OriginalName(self):
    # Generated names look like '<base><_SEPARATOR><index>'; recover the base
    # name the user actually wrote.
    return self._testMethodName.split(_SEPARATOR)[0]

  def __str__(self):
    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))

  def id(self):  # pylint: disable=invalid-name
    """Returns the descriptive ID of the test.

    This is used internally by the unittesting framework to get a name
    for the test to be used in reports.

    Returns:
      The test id, including the per-case suffix recorded by the metaclass.
    """
    return '%s.%s%s' % (_StrClass(self.__class__),
                        self._OriginalName(),
                        self._id_suffix.get(self._testMethodName, ''))
387
+
388
+
389
def CoopTestCase(other_base_class):
  """Returns a new base class with a cooperative metaclass base.

  This enables the TestCase to be used in combination
  with other base classes that have custom metaclasses, such as
  mox.MoxTestBase.

  Only works with metaclasses that do not override type.__new__.

  Example:

    import google3
    import mox

    from google.protobuf.internal import _parameterized

    class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
      ...

  Args:
    other_base_class: (class) A test case base class.

  Returns:
    A new class object.
  """
  # NOTE(review): `other_base_class.__metaclass__` is the Python 2 convention;
  # in Python 3 ordinary classes have no `__metaclass__` attribute (the
  # metaclass is `type(other_base_class)`), so this raises AttributeError for
  # such bases — confirm which base classes are meant to be supported.
  metaclass = type(
      'CoopMetaclass',
      (other_base_class.__metaclass__,
       TestGeneratorMetaclass), {})
  return metaclass(
      'CoopTestCase',
      (other_base_class, TestCase), {})
parrot/lib/python3.10/site-packages/google/protobuf/internal/api_implementation.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Determine which implementation of the protobuf API is used in this process.
9
+ """
10
+
11
+ import importlib
12
+ import os
13
+ import sys
14
+ import warnings
15
+
16
+ _GOOGLE3_PYTHON_UPB_DEFAULT = True
17
+
18
+
19
+ def _ApiVersionToImplementationType(api_version):
20
+ if api_version == 2:
21
+ return 'cpp'
22
+ if api_version == 1:
23
+ raise ValueError('api_version=1 is no longer supported.')
24
+ if api_version == 0:
25
+ return 'python'
26
+ return None
27
+
28
+
29
# Compile-time override: a build may ship an _api_implementation module whose
# api_version constant pins the implementation choice.
_implementation_type = None
try:
  # pylint: disable=g-import-not-at-top
  from google.protobuf.internal import _api_implementation
  # The compile-time constants in the _api_implementation module can be used to
  # switch to a certain implementation of the Python API at build time.
  _implementation_type = _ApiVersionToImplementationType(
      _api_implementation.api_version)
except ImportError:
  pass  # Unspecified by compiler flags.
39
+
40
+
41
+ def _CanImport(mod_name):
42
+ try:
43
+ mod = importlib.import_module(mod_name)
44
+ # Work around a known issue in the classic bootstrap .par import hook.
45
+ if not mod:
46
+ raise ImportError(mod_name + ' import succeeded but was None')
47
+ return True
48
+ except ImportError:
49
+ return False
50
+
51
+
52
if _implementation_type is None:
  # No compile-time override: probe in preference order upb, then cpp,
  # falling back to the pure-python implementation.
  if _CanImport('google._upb._message'):
    _implementation_type = 'upb'
  elif _CanImport('google.protobuf.pyext._message'):
    _implementation_type = 'cpp'
  else:
    _implementation_type = 'python'


# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are
# valid values. Any other value will raise error.
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
                                 _implementation_type)

if _implementation_type not in ('python', 'cpp', 'upb'):
  raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not '
                   'supported. Please set to \'python\', \'cpp\' or '
                   '\'upb\'.'.format(_implementation_type))

if 'PyPy' in sys.version and _implementation_type == 'cpp':
  warnings.warn('PyPy does not work yet with cpp protocol buffers. '
                'Falling back to the python implementation.')
  _implementation_type = 'python'

# The accelerator extension module (pyext or upb), if one was loaded.
_c_module = None

if _implementation_type == 'cpp':
  try:
    # pylint: disable=g-import-not-at-top
    from google.protobuf.pyext import _message
    # Also expose the module under the legacy google3 dotted path.
    sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message
    _c_module = _message
    del _message
  except ImportError:
    # TODO: fail back to python
    warnings.warn(
        'Selected implementation cpp is not available.')
    pass

if _implementation_type == 'upb':
  try:
    # pylint: disable=g-import-not-at-top
    from google._upb import _message
    _c_module = _message
    del _message
  except ImportError:
    warnings.warn('Selected implementation upb is not available. '
                  'Falling back to the python implementation.')
    _implementation_type = 'python'
    pass

# Detect if serialization should be deterministic by default
try:
  # The presence of this module in a build allows the proto implementation to
  # be upgraded merely via build deps.
  #
  # NOTE: Merely importing this automatically enables deterministic proto
  # serialization for C++ code, but we still need to export it as a boolean so
  # that we can do the same for `_implementation_type == 'python'`.
  #
  # NOTE2: It is possible for C++ code to enable deterministic serialization by
  # default _without_ affecting Python code, if the C++ implementation is not in
  # use by this module. That is intended behavior, so we don't actually expose
  # this boolean outside of this module.
  #
  # pylint: disable=g-import-not-at-top,unused-import
  from google.protobuf import enable_deterministic_proto_serialization
  _python_deterministic_proto_serialization = True
except ImportError:
  _python_deterministic_proto_serialization = False
124
+
125
+
126
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
  """Returns the selected implementation name: 'python', 'cpp' or 'upb'."""
  return _implementation_type


# See comment on 'Type' above.
# TODO: Remove the API, it returns a constant. b/228102101
def Version():
  """Returns the (constant) API version; kept only for compatibility."""
  return 2


# For internal use only
def IsPythonDefaultSerializationDeterministic():
  """Whether pure-Python serialization defaults to deterministic output."""
  return _python_deterministic_proto_serialization
parrot/lib/python3.10/site-packages/google/protobuf/internal/builder.py ADDED
@@ -0,0 +1,117 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Builds descriptors, message classes and services for generated _pb2.py.
9
+
10
+ This file is only called in python generated _pb2.py files. It builds
11
+ descriptors, message classes and services that users can directly use
12
+ in generated code.
13
+ """
14
+
15
+ __author__ = 'jieluo@google.com (Jie Luo)'
16
+
17
+ from google.protobuf.internal import enum_type_wrapper
18
+ from google.protobuf.internal import python_message
19
+ from google.protobuf import message as _message
20
+ from google.protobuf import reflection as _reflection
21
+ from google.protobuf import symbol_database as _symbol_database
22
+
23
+ _sym_db = _symbol_database.Default()
24
+
25
+
26
def BuildMessageAndEnumDescriptors(file_des, module):
  """Builds message and enum descriptors.

  Exposes every message and enum descriptor of the file in *module* under
  the conventional generated-code names (_MESSAGE, _MESSAGE_NESTED, ...).

  Args:
    file_des: FileDescriptor of the .proto file
    module: Generated _pb2 module
  """

  def _ExposeNested(descriptor, prefix):
    # Nested messages become e.g. _OUTER_INNER; nested enums _OUTER_COLOR.
    for nested_name, nested_descriptor in descriptor.nested_types_by_name.items():
      exposed_name = prefix + nested_name.upper()
      module[exposed_name] = nested_descriptor
      _ExposeNested(nested_descriptor, exposed_name + '_')
    for enum_descriptor in descriptor.enum_types:
      module[prefix + enum_descriptor.name.upper()] = enum_descriptor

  for top_name, top_descriptor in file_des.message_types_by_name.items():
    exposed_name = '_' + top_name.upper()
    module[exposed_name] = top_descriptor
    _ExposeNested(top_descriptor, exposed_name + '_')
46
+
47
+
48
def BuildTopDescriptorsAndMessages(file_des, module_name, module):
  """Builds top level descriptors and message classes.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """

  def _MakeMessageClass(descriptor):
    # Build classes for nested messages first so they become attributes of
    # the enclosing class, then register the new class with the symbol db.
    class_attributes = {
        nested_name: _MakeMessageClass(nested_descriptor)
        for nested_name, nested_descriptor
        in descriptor.nested_types_by_name.items()
    }
    class_attributes['DESCRIPTOR'] = descriptor
    class_attributes['__module__'] = module_name
    new_class = _reflection.GeneratedProtocolMessageType(
        descriptor.name, (_message.Message,), class_attributes)
    _sym_db.RegisterMessage(new_class)
    return new_class

  # Top level enums: descriptor, wrapper, and each value as a module constant.
  for enum_name, enum_descriptor in file_des.enum_types_by_name.items():
    module['_' + enum_name.upper()] = enum_descriptor
    module[enum_name] = enum_type_wrapper.EnumTypeWrapper(enum_descriptor)
    for enum_value in enum_descriptor.values:
      module[enum_value.name] = enum_value.number

  # Top level extensions: field-number constant plus the descriptor itself.
  for ext_name, ext_descriptor in file_des.extensions_by_name.items():
    module[ext_name.upper() + '_FIELD_NUMBER'] = ext_descriptor.number
    module[ext_name] = ext_descriptor

  # Service descriptors.
  for service_name, service_descriptor in file_des.services_by_name.items():
    module['_' + service_name.upper()] = service_descriptor

  # Message classes, built last.
  for message_name, message_descriptor in file_des.message_types_by_name.items():
    module[message_name] = _MakeMessageClass(message_descriptor)
87
+
88
+
89
def AddHelpersToExtensions(file_des):
  """No-op kept so old generated code keeps working with the new runtime.

  Args:
    file_des: FileDescriptor of the .proto file
  """
  # TODO: Remove this no-op once old generated code is gone.
  del file_des  # Unused.
97
+
98
+
99
def BuildServices(file_des, module_name, module):
  """Builds services classes and services stub class.

  Args:
    file_des: FileDescriptor of the .proto file
    module_name: str, the name of generated _pb2 module
    module: Generated _pb2 module
  """
  # Imported here to avoid an import cycle at module load time.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import service_reflection
  # pylint: enable=g-import-not-at-top
  for service_name, service_descriptor in file_des.services_by_name.items():
    module[service_name] = service_reflection.GeneratedServiceType(
        service_name, (),
        dict(DESCRIPTOR=service_descriptor, __module__=module_name))
    stub_name = service_name + '_Stub'
    # The stub class derives from the service class built just above.
    module[stub_name] = service_reflection.GeneratedServiceStubType(
        stub_name, (module[service_name],),
        dict(DESCRIPTOR=service_descriptor, __module__=module_name))
parrot/lib/python3.10/site-packages/google/protobuf/internal/containers.py ADDED
@@ -0,0 +1,677 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains container classes to represent different protocol buffer types.
9
+
10
+ This file defines container classes which represent categories of protocol
11
+ buffer field types which need extra maintenance. Currently these categories
12
+ are:
13
+
14
+ - Repeated scalar fields - These are all repeated fields which aren't
15
+ composite (e.g. they are of simple types like int32, string, etc).
16
+ - Repeated composite fields - Repeated fields which are composite. This
17
+ includes groups and nested messages.
18
+ """
19
+
20
+ import collections.abc
21
+ import copy
22
+ import pickle
23
+ from typing import (
24
+ Any,
25
+ Iterable,
26
+ Iterator,
27
+ List,
28
+ MutableMapping,
29
+ MutableSequence,
30
+ NoReturn,
31
+ Optional,
32
+ Sequence,
33
+ TypeVar,
34
+ Union,
35
+ overload,
36
+ )
37
+
38
+
39
+ _T = TypeVar('_T')
40
+ _K = TypeVar('_K')
41
+ _V = TypeVar('_V')
42
+
43
+
44
class BaseContainer(Sequence[_T]):
  """Common behavior shared by the repeated-field containers."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener: Any) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  @overload
  def __getitem__(self, key: int) -> _T:
    ...

  @overload
  def __getitem__(self, key: slice) -> List[_T]:
    ...

  def __getitem__(self, key):
    """Returns the element (or list slice) stored at *key*."""
    return self._values[key]

  def __len__(self) -> int:
    """Returns how many elements the container holds."""
    return len(self._values)

  def __ne__(self, other: Any) -> bool:
    """Inverse of equality; concrete subclasses must define __eq__."""
    return not self == other

  # Containers are mutable, hence unhashable.
  __hash__ = None

  def __repr__(self) -> str:
    return repr(self._values)

  def sort(self, *args, **kwargs) -> None:
    """Sorts in place; still accepts the legacy 'sort_function' keyword."""
    # Continue to support the old sort_function keyword argument by mapping
    # it onto the (equally legacy) 'cmp' keyword.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)

  def reverse(self) -> None:
    """Reverses the element order in place."""
    self._values.reverse()


# TODO: Remove this. BaseContainer does *not* conform to
# MutableSequence, only its subclasses do.
collections.abc.MutableSequence.register(BaseContainer)
101
+
102
+
103
class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(
      self,
      message_listener: Any,
      type_checker: Any,
  ) -> None:
    """Args:

      message_listener: A MessageListener implementation. The
        RepeatedScalarFieldContainer will call this object's Modified() method
        when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super().__init__(message_listener)
    self._type_checker = type_checker

  def _NotifyIfClean(self) -> None:
    # Skips the Modified() callback when the parent already knows it's dirty.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def append(self, value: _T) -> None:
    """Appends one type-checked item, mirroring list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    self._NotifyIfClean()

  def insert(self, key: int, value: _T) -> None:
    """Inserts a type-checked item at *key*, mirroring list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    self._NotifyIfClean()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Appends every element of *elem_seq*, mirroring list.extend()."""
    checked = [self._type_checker.CheckValue(elem) for elem in iter(elem_seq)]
    # An empty extension is a no-op and must not mark the message modified.
    if checked:
      self._values.extend(checked)
      self._message_listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other)
    self._message_listener.Modified()

  def remove(self, elem: _T):
    """Removes the first occurrence of *elem*, mirroring list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns the item at *key*, mirroring list.pop()."""
    popped = self._values[key]
    self.__delitem__(key)
    return popped

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value) -> None:
    """Replaces the item (or contiguous slice) at *key* after type-checking."""
    if isinstance(key, slice):
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      self._values[key] = [self._type_checker.CheckValue(item)
                           for item in value]
    else:
      self._values[key] = self._type_checker.CheckValue(value)
    self._message_listener.Modified()

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item (or slice) at *key*."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares against another container or any other sequence."""
    if self is other:
      return True
    if isinstance(other, self.__class__):
      # Fast path: same concrete type, compare the backing lists directly.
      return other._values == self._values
    # Otherwise defer to the other sequence's notion of equality.
    return other == self._values

  def __deepcopy__(
      self,
      unused_memo: Any = None,
  ) -> 'RepeatedScalarFieldContainer[_T]':
    duplicate = RepeatedScalarFieldContainer(
        copy.deepcopy(self._message_listener), self._type_checker)
    duplicate.MergeFrom(self)
    return duplicate

  def __reduce__(self, **kwargs) -> NoReturn:
    # Pickling would lose the listener linkage, so it is forbidden outright.
    raise pickle.PickleError(
        "Can't pickle repeated scalar fields, convert to list first")
212
+
213
+
214
# TODO: Constrain T to be a subtype of Message.
class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]):
  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener: Any, message_descriptor: Any) -> None:
    """
    Note that we pass in a descriptor instead of the generated class directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container. We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super().__init__(message_listener)
    self._message_descriptor = message_descriptor

  def _NewElement(self) -> _T:
    # Every stored element must report its mutations back to our listener.
    fresh = self._message_descriptor._concrete_class()
    fresh._SetListener(self._message_listener)
    return fresh

  def add(self, **kwargs: Any) -> _T:
    """Appends a new element initialized from **kwargs and returns it."""
    fresh = self._message_descriptor._concrete_class(**kwargs)
    fresh._SetListener(self._message_listener)
    self._values.append(fresh)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return fresh

  def append(self, value: _T) -> None:
    """Appends a copy of *value* (via CopyFrom) to the container."""
    fresh = self._NewElement()
    fresh.CopyFrom(value)
    self._values.append(fresh)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key: int, value: _T) -> None:
    """Inserts a copy of *value* (via CopyFrom) at position *key*."""
    fresh = self._NewElement()
    fresh.CopyFrom(value)
    self._values.insert(key, fresh)
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq: Iterable[_T]) -> None:
    """Appends a copy of every message in *elem_seq* (via MergeFrom)."""
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    backing = self._values
    for source in elem_seq:
      fresh = message_class()
      fresh._SetListener(listener)
      fresh.MergeFrom(source)
      backing.append(fresh)
    listener.Modified()

  def MergeFrom(
      self,
      other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]],
  ) -> None:
    """Appends copies of the contents of another repeated field of the same
    type to this one.
    """
    self.extend(other)

  def remove(self, elem: _T) -> None:
    """Removes the first occurrence of *elem*, mirroring list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key: Optional[int] = -1) -> _T:
    """Removes and returns the item at *key*, mirroring list.pop()."""
    popped = self._values[key]
    self.__delitem__(key)
    return popped

  @overload
  def __setitem__(self, key: int, value: _T) -> None:
    ...

  @overload
  def __setitem__(self, key: slice, value: Iterable[_T]) -> None:
    ...

  def __setitem__(self, key, value):
    # Present only so the class is structurally compatible with
    # typing.MutableSequence; direct item assignment is never supported.
    raise TypeError(
        f'{self.__class__.__name__} object does not support item assignment')

  def __delitem__(self, key: Union[int, slice]) -> None:
    """Deletes the item (or slice) at *key*."""
    del self._values[key]
    self._message_listener.Modified()

  def __eq__(self, other: Any) -> bool:
    """Compares element-wise; only other composite containers are accepted."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
331
+
332
+
333
class ScalarMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
               '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      key_checker: Any,
      value_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    """defaultdict-like lookup: a missing key is type-checked, then inserted
    with the field's default value."""
    try:
      return self._values[key]
    except KeyError:
      key = self._key_checker.CheckValue(key)
      val = self._value_checker.DefaultValue()
      self._values[key] = val
      return val

  def __contains__(self, item: _K) -> bool:
    # We check the key's type to match the strong-typing flavor of the API.
    # Also this makes it easier to match the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __setitem__(self, key: _K, value: _V) -> None:
    # FIX: the return annotation was previously `-> _T`, but this method
    # always returns None, matching the MutableMapping contract.
    checked_key = self._key_checker.CheckValue(key)
    checked_value = self._value_checker.CheckValue(value)
    self._values[checked_key] = checked_value
    self._message_listener.Modified()

  def __delitem__(self, key: _K) -> None:
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None:
    """Copies every entry of *other* into this map (last key wins)."""
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    """Returns the generated class for a (key, value) map-entry message."""
    return self._entry_descriptor._concrete_class
433
+
434
+
435
class MessageMap(MutableMapping[_K, _V]):
  """Simple, type-checked, dict-like container with submessage values."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor', '_entry_descriptor']

  def __init__(
      self,
      message_listener: Any,
      message_descriptor: Any,
      key_checker: Any,
      entry_descriptor: Any,
  ) -> None:
    """
    Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: The MessageDescriptor of the value submessages;
        its _concrete_class is instantiated on first access of a key.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._entry_descriptor = entry_descriptor
    self._values = {}

  def __getitem__(self, key: _K) -> _V:
    """Returns the submessage for *key*, creating (and notifying) if absent."""
    key = self._key_checker.CheckValue(key)
    try:
      return self._values[key]
    except KeyError:
      created = self._message_descriptor._concrete_class()
      created._SetListener(self._message_listener)
      self._values[key] = created
      # Inserting a new entry mutates the enclosing message.
      self._message_listener.Modified()
      return created

  def get_or_create(self, key: _K) -> _V:
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map. This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  @overload
  def get(self, key: _K) -> Optional[_V]:
    ...

  @overload
  def get(self, key: _K, default: _T) -> Union[_V, _T]:
    ...

  # Overridden explicitly: the defaultdict-like __getitem__ would otherwise
  # make the base-class implementation insert the key as a side effect.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __contains__(self, item: _K) -> bool:
    item = self._key_checker.CheckValue(item)
    return item in self._values

  def __setitem__(self, key: _K, value: _V) -> NoReturn:
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key: _K) -> None:
    key = self._key_checker.CheckValue(key)
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self) -> int:
    return len(self._values)

  def __iter__(self) -> Iterator[_K]:
    return iter(self._values)

  def __repr__(self) -> str:
    return repr(self._values)

  def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None:
    """Copies every entry of *other* into this map."""
    # pylint: disable=protected-access
    for key in other._values:
      # According to documentation: "When parsing from the wire or when merging,
      # if there are duplicate map keys the last key seen is used".
      if key in self:
        del self[key]
      self[key].CopyFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self) -> None:
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # Defined in the abstract base, but a direct dict.clear() is much cheaper.
  def clear(self) -> None:
    self._values.clear()
    self._message_listener.Modified()

  def GetEntryClass(self) -> Any:
    """Returns the generated class for a (key, value) map-entry message."""
    return self._entry_descriptor._concrete_class
554
+
555
+
556
+ class _UnknownField:
557
+ """A parsed unknown field."""
558
+
559
+ # Disallows assignment to other attributes.
560
+ __slots__ = ['_field_number', '_wire_type', '_data']
561
+
562
+ def __init__(self, field_number, wire_type, data):
563
+ self._field_number = field_number
564
+ self._wire_type = wire_type
565
+ self._data = data
566
+ return
567
+
568
+ def __lt__(self, other):
569
+ # pylint: disable=protected-access
570
+ return self._field_number < other._field_number
571
+
572
+ def __eq__(self, other):
573
+ if self is other:
574
+ return True
575
+ # pylint: disable=protected-access
576
+ return (self._field_number == other._field_number and
577
+ self._wire_type == other._wire_type and
578
+ self._data == other._data)
579
+
580
+
581
class UnknownFieldRef:  # pylint: disable=missing-class-docstring

  def __init__(self, parent, index):
    self._parent = parent
    self._index = index

  def _check_valid(self):
    # A falsy or shrunken parent means the underlying message was cleared.
    if not self._parent or self._index >= len(self._parent):
      raise ValueError('UnknownField does not exist. '
                       'The parent message might be cleared.')

  def _resolve(self):
    """Validates this ref and returns the underlying _UnknownField."""
    self._check_valid()
    # pylint: disable=protected-access
    return self._parent._internal_get(self._index)

  @property
  def field_number(self):
    # pylint: disable=protected-access
    return self._resolve()._field_number

  @property
  def wire_type(self):
    # pylint: disable=protected-access
    return self._resolve()._wire_type

  @property
  def data(self):
    # pylint: disable=protected-access
    return self._resolve()._data
612
+
613
+
614
class UnknownFieldSet:
  """UnknownField container."""

  # Disallows assignment to other attributes.
  __slots__ = ['_values']

  def __init__(self):
    self._values = []

  def __getitem__(self, index):
    """Returns an UnknownFieldRef for the (possibly negative) *index*."""
    if self._values is None:
      raise ValueError('UnknownFields does not exist. '
                       'The parent message might be cleared.')
    size = len(self._values)
    if index < 0:
      index += size
    if index < 0 or index >= size:
      # FIX: this previously raised IndexError('index %d out of range'.index),
      # passing the bound str.index method instead of the formatted message.
      raise IndexError('index %d out of range' % index)

    return UnknownFieldRef(self, index)

  def _internal_get(self, index):
    # Raw access for UnknownFieldRef; no bounds or cleared-state checking.
    return self._values[index]

  def __len__(self):
    if self._values is None:
      raise ValueError('UnknownFields does not exist. '
                       'The parent message might be cleared.')
    return len(self._values)

  def _add(self, field_number, wire_type, data):
    """Appends a new _UnknownField and returns it (internal use only)."""
    unknown_field = _UnknownField(field_number, wire_type, data)
    self._values.append(unknown_field)
    return unknown_field

  def __iter__(self):
    for i in range(len(self)):
      yield UnknownFieldRef(self, i)

  def _extend(self, other):
    """Appends all fields of *other* (another UnknownFieldSet, or None)."""
    if other is None:
      return
    # pylint: disable=protected-access
    self._values.extend(other._values)

  def __eq__(self, other):
    if self is other:
      return True
    # Sort unknown fields because their order shouldn't
    # affect equality test.
    values = list(self._values)
    if other is None:
      return not values
    values.sort()
    # pylint: disable=protected-access
    other_values = sorted(other._values)
    return values == other_values

  def _clear(self):
    """Recursively clears nested groups, then marks this set as cleared."""
    for value in self._values:
      # pylint: disable=protected-access
      if isinstance(value._data, UnknownFieldSet):
        value._data._clear()  # pylint: disable=protected-access
    self._values = None
parrot/lib/python3.10/site-packages/google/protobuf/internal/decoder.py ADDED
@@ -0,0 +1,1036 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Code for decoding protocol buffer primitives.
9
+
10
+ This code is very similar to encoder.py -- read the docs for that module first.
11
+
12
+ A "decoder" is a function with the signature:
13
+ Decode(buffer, pos, end, message, field_dict)
14
+ The arguments are:
15
+ buffer: The string containing the encoded message.
16
+ pos: The current position in the string.
17
+ end: The position in the string where the current message ends. May be
18
+ less than len(buffer) if we're reading a sub-message.
19
+ message: The message object into which we're parsing.
20
+ field_dict: message._fields (avoids a hashtable lookup).
21
+ The decoder reads the field and stores it into field_dict, returning the new
22
+ buffer position. A decoder for a repeated field may proactively decode all of
23
+ the elements of that field, if they appear consecutively.
24
+
25
+ Note that decoders may throw any of the following:
26
+ IndexError: Indicates a truncated message.
27
+ struct.error: Unpacking of a fixed-width field failed.
28
+ message.DecodeError: Other errors.
29
+
30
+ Decoders are expected to raise an exception if they are called with pos > end.
31
+ This allows callers to be lax about bounds checking: it's fine to read past
32
+ "end" as long as you are sure that someone else will notice and throw an
33
+ exception later on.
34
+
35
+ Something up the call stack is expected to catch IndexError and struct.error
36
+ and convert them to message.DecodeError.
37
+
38
+ Decoders are constructed using decoder constructors with the signature:
39
+ MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
40
+ The arguments are:
41
+ field_number: The field number of the field we want to decode.
42
+ is_repeated: Is the field a repeated field? (bool)
43
+ is_packed: Is the field a packed field? (bool)
44
+ key: The key to use when looking up the field within field_dict.
45
+ (This is actually the FieldDescriptor but nothing in this
46
+ file should depend on that.)
47
+ new_default: A function which takes a message object as a parameter and
48
+ returns a new instance of the default value for this field.
49
+ (This is called for repeated fields and sub-messages, when an
50
+ instance does not already exist.)
51
+
52
+ As with encoders, we define a decoder constructor for every type of field.
53
+ Then, for every field of every message class we construct an actual decoder.
54
+ That decoder goes into a dict indexed by tag, so when we decode a message
55
+ we repeatedly read a tag, look up the corresponding decoder, and invoke it.
56
+ """
57
+
58
+ __author__ = 'kenton@google.com (Kenton Varda)'
59
+
60
+ import math
61
+ import struct
62
+
63
+ from google.protobuf import message
64
+ from google.protobuf.internal import containers
65
+ from google.protobuf.internal import encoder
66
+ from google.protobuf.internal import wire_format
67
+
68
+
69
# This is not for optimization, but rather to avoid conflicts with local
# variables named "message".
# (Many decoder signatures below take a parameter named "message", which
# would otherwise shadow the google.protobuf.message module.)
_DecodeError = message.DecodeError
72
+
73
+
74
def _VarintDecoder(mask, result_type):
  """Return an encoder for a basic varint value (does not include tag).

  Decoded values will be bitwise-anded with the given mask before being
  returned, e.g. to limit them to 32 bits.  The returned decoder does not
  take the usual "end" parameter -- the caller is expected to do bounds
  checking after the fact (often the caller can defer such checking until
  later).  The decoder returns a (value, new_pos) pair.

  Args:
    mask: int bitmask applied to the decoded value (e.g. 2**32 - 1).
    result_type: callable applied to the masked result before returning
      (e.g. int).

  Returns:
    A DecodeVarint(buffer, pos) function.  Two calling conventions are
    supported: when pos is an int, buffer is indexable and a
    (value, new_pos) tuple is returned; when pos is None, buffer is
    treated as a BytesIO-like stream and only the value is returned
    (None at a clean end of stream).
  """

  def DecodeVarint(buffer, pos: int=None):
    result = 0
    shift = 0
    while 1:
      if pos is None:
        # Read from BytesIO
        try:
          # read(1) returns b'' at EOF, so [0] raises IndexError there.
          b = buffer.read(1)[0]
        except IndexError as e:
          if shift == 0:
            # End of BytesIO.
            return None
          else:
            # EOF in the middle of a varint is a hard error.
            raise ValueError('Fail to read varint %s' % str(e))
      else:
        b = buffer[pos]
        pos += 1
      # Low 7 bits are payload; high bit is the continuation flag.
      result |= ((b & 0x7f) << shift)
      if not (b & 0x80):
        result &= mask
        result = result_type(result)
        return result if pos is None else (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')

  return DecodeVarint
111
+
112
+
113
+ def _SignedVarintDecoder(bits, result_type):
114
+ """Like _VarintDecoder() but decodes signed values."""
115
+
116
+ signbit = 1 << (bits - 1)
117
+ mask = (1 << bits) - 1
118
+
119
+ def DecodeVarint(buffer, pos):
120
+ result = 0
121
+ shift = 0
122
+ while 1:
123
+ b = buffer[pos]
124
+ result |= ((b & 0x7f) << shift)
125
+ pos += 1
126
+ if not (b & 0x80):
127
+ result &= mask
128
+ result = (result ^ signbit) - signbit
129
+ result = result_type(result)
130
+ return (result, pos)
131
+ shift += 7
132
+ if shift >= 64:
133
+ raise _DecodeError('Too many bytes when decoding varint.')
134
+ return DecodeVarint
135
+
136
# All 32-bit and 64-bit values are represented as int.
# (The mask truncates to 64 bits; int() is effectively the identity here.)
_DecodeVarint = _VarintDecoder((1 << 64) - 1, int)
_DecodeSignedVarint = _SignedVarintDecoder(64, int)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder(32, int)
143
+
144
+
145
def ReadTag(buffer, pos):
  """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple.

  We return the raw bytes of the tag rather than decoding them.  The raw
  bytes can then be used to look up the proper decoder.  This effectively
  allows us to trade some work that would be done in pure-python (decoding a
  varint) for work that is done in C (searching for a byte string in a hash
  table).  In a low-level language it would be much cheaper to decode the
  varint and use that, but not in Python.

  Args:
    buffer: memoryview object of the encoded bytes
    pos: int of the current position to start from

  Returns:
    Tuple[bytes, int] of the tag data and new position.
  """
  tag_start = pos
  # A varint's final byte has the continuation (0x80) bit clear; advance
  # past every continuation byte, then past the final byte itself.
  end_pos = pos
  while buffer[end_pos] & 0x80:
    end_pos += 1
  end_pos += 1

  return buffer[tag_start:end_pos].tobytes(), end_pos
169
+
170
+
171
+ # --------------------------------------------------------------------
172
+
173
+
174
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
    wire_type: The field's wire type.
    decode_value: A function which decodes an individual value, e.g.
      _DecodeVarint()

  Returns:
    A decoder constructor with the MakeDecoder signature described in the
    module docstring.
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default,
                      clear_if_default=False):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Packed fields are length-delimited: read the payload length first.
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        (new_value, pos) = decode_value(buffer, pos)
        if pos > end:
          raise _DecodeError('Truncated message.')
        # Proto3 semantics: a default (zero) value clears the field.
        if clear_if_default and not new_value:
          field_dict.pop(key, None)
        else:
          field_dict[key] = new_value
        return pos
      return DecodeField

  return SpecificDecoder
235
+
236
+
237
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like SimpleDecoder but additionally invokes modify_value on every value
  before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  def InnerDecode(buffer, pos):
    raw_value, next_pos = decode_value(buffer, pos)
    return (modify_value(raw_value), next_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
249
+
250
+
251
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
    wire_type: The field's wire type.
    format: The format string to pass to struct.unpack().

  Returns:
    A decoder constructor with the MakeDecoder signature described in the
    module docstring.
  """

  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    # Fixed-width: the encoded size is known up front from the format.
    new_pos = pos + value_size
    result = local_unpack(format, buffer[pos:new_pos])[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_type, InnerDecode)
274
+
275
+
276
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized float to a float and new position.

    Args:
      buffer: memoryview of the serialized bytes
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the deserialized float value and new position
      in the serialized data.
    """
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (The `in b'\x7F\xFF'` test matches a last byte of exactly 0x7F or 0xFF,
    # i.e. sign bit either way with the top 7 exponent bits set; the second
    # byte >= 0x80 supplies the low exponent bit.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (math.nan, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (-math.inf, new_pos)
      return (math.inf, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
319
+
320
+
321
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    """Decode serialized double to a double and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.

    Returns:
      Tuple[float, int] of the decoded double value and new position
      in the serialized data.
    """
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the
    # significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos].tobytes()

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (math.nan, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
359
+
360
+
361
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default,
                clear_if_default=False):
  """Returns a decoder for enum field.

  Unlike the scalar decoders built by _SimpleDecoder, enum decoding must
  check each decoded number against the enum's known values: unrecognized
  numbers are preserved in message._unknown_fields (re-encoded as a plain
  varint field) instead of being stored in field_dict.
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      """Decode serialized packed enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum number: keep the raw varint bytes so the value
          # round-trips through reserialization.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)

          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos].tobytes()))
        # pylint: enable=protected-access
      if pos > endpoint:
        # Roll back whichever list received the last (corrupt) element.
        if element in enum_type.values_by_number:
          del value[-1]   # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
          # pylint: enable=protected-access
        raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        # pylint: disable=protected-access
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos].tobytes()))
        # pylint: enable=protected-access
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      """Decode serialized repeated enum to its value and a new position.

      Args:
        buffer: memoryview of the serialized bytes.
        pos: int, position in the memory view to start at.
        end: int, end position of serialized data
        message: Message object to store unknown fields in
        field_dict: Map[Descriptor, Any] to store decoded values in.

      Returns:
        int, new position in serialized data.
      """
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      # Proto3 semantics: the zero value clears the field.
      if clear_if_default and not enum_value:
        field_dict.pop(key, None)
        return pos
      # pylint: disable=protected-access
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos].tobytes()))
      # pylint: enable=protected-access
      return pos
    return DecodeField
484
+
485
+
486
# --------------------------------------------------------------------

# Concrete decoder constructors, one per scalar field type.  Each has the
# MakeDecoder signature described in the module docstring.

Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 are zigzag-encoded on the wire.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
516
+
517
+
518
def StringDecoder(field_number, is_repeated, is_packed, key, new_default,
                  clear_if_default=False):
  """Returns a decoder for a string field.

  Strings are length-delimited on the wire and must be valid UTF-8; a
  UnicodeDecodeError is re-raised with the field's full name added.
  """

  local_DecodeVarint = _DecodeVarint

  def _ConvertToUnicode(memview):
    """Convert byte to unicode."""
    byte_str = memview.tobytes()
    try:
      value = str(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

    return value

  # Strings are never packed (only primitive numeric types can be).
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      # Proto3 semantics: the empty string clears the field.
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
569
+
570
+
571
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default,
                 clear_if_default=False):
  """Returns a decoder for a bytes field.

  Identical in structure to StringDecoder, but stores the raw bytes
  without UTF-8 decoding.
  """

  local_DecodeVarint = _DecodeVarint

  # Bytes fields are never packed (only primitive numeric types can be).
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(buffer[pos:new_pos].tobytes())
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      # Proto3 semantics: empty bytes clear the field.
      if clear_if_default and not size:
        field_dict.pop(key, None)
      else:
        field_dict[key] = buffer[pos:new_pos].tobytes()
      return new_pos
    return DecodeField
610
+
611
+
612
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  Groups are delimited by START_GROUP/END_GROUP tags rather than a length
  prefix, so the sub-message is parsed up to `end` and must be followed by
  the matching end tag.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): this lookup repeats the one above on every
        # iteration; it looks redundant but is preserved as-is.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
657
+
658
+
659
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field.

  Sub-messages are length-delimited: a varint byte length precedes the
  serialized payload, which is parsed with the sub-message's
  _InternalParse.
  """

  local_DecodeVarint = _DecodeVarint

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
707
+
708
+
709
# --------------------------------------------------------------------

# Start-group tag of the repeated "Item = 1" group inside a MessageSet.
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
712
+
713
def MessageSetItemDecoder(descriptor):
  """Returns a decoder for a MessageSet item.

  The parameter is the message Descriptor.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decode serialized message set to its value and new position.

    Args:
      buffer: memoryview of the serialized bytes.
      pos: int, position in the memory view to start at.
      end: int, end position of serialized data
      message: Message object to store unknown fields in
      field_dict: Map[Descriptor, Any] to store decoded values in.

    Returns:
      int, new position in serialized data.

    Raises:
      _DecodeError: if the item is truncated, is missing a required
        sub-field, or lacks its group end tag.
    """
    message_set_item_start = pos
    # Sentinels: -1 means "not seen yet" for the required sub-fields.
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Use the prebound local (the original bound local_SkipField but then
        # called the module-level SkipField, leaving the local unused).
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = message.Extensions._FindExtensionByNumber(type_id)
    # pylint: disable=protected-access
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        message_type = extension.message_type
        if not hasattr(message_type, '_concrete_class'):
          # Imported here (not at module level) to avoid an import cycle;
          # without this import the call below raised NameError because
          # message_factory is not among the module-level imports.
          # pylint: disable=g-import-not-at-top
          from google.protobuf import message_factory
          # Force creation of the concrete class for this message type.
          message_factory.GetMessageClass(message_type)
        value = field_dict.setdefault(
            extension, message_type._concrete_class())
      if value._InternalParse(buffer, message_start, message_end) != message_end:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown extension: preserve the whole raw item for reserialization.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append(
          (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes()))
    # pylint: enable=protected-access

    return pos

  return DecodeItem
801
+
802
+
803
def UnknownMessageSetItemDecoder():
  """Returns a decoder for a Unknown MessageSet item."""

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  def DecodeUnknownItem(buffer):
    """Decode one MessageSet item into (type_id, serialized_message).

    Args:
      buffer: memoryview of the serialized item (the group contents).

    Returns:
      Tuple[int, bytes] of the extension type_id and the raw serialized
      message payload.

    Raises:
      _DecodeError: if the item is truncated or missing a required field.
    """
    pos = 0
    end = len(buffer)
    # Initialize all sentinels up front.  type_id was previously left
    # uninitialized, so an item missing its type_id raised
    # UnboundLocalError at the check below instead of _DecodeError.
    type_id = -1
    message_start = -1
    message_end = -1
    while 1:
      (tag_bytes, pos) = ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = _DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = _DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        pos = SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    return (type_id, buffer[message_start:message_end].tobytes())

  return DecodeUnknownItem
840
+
841
+ # --------------------------------------------------------------------
842
+
843
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field.

  Each map entry arrives on the wire as a length-delimited sub-message with
  a `key` field and a `value` field; entries are decoded into a scratch
  sub-message and copied into the map container.

  Args:
    field_descriptor: FieldDescriptor of the map field (also used as the
      field_dict key).
    new_default: Factory returning a new map container for the message.
    is_message_map: True when map values are messages (values are merged
      with CopyFrom) rather than scalars (plain assignment).
  """

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict):
    # One scratch entry message, cleared and reused for every entry.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        value[submsg.key].CopyFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed.  Return.
        return new_pos

  return DecodeMap
884
+
885
+ # --------------------------------------------------------------------
886
+ # Optimization is not as heavy here because calls to SkipField() are rare,
887
+ # except for handling end-group tags.
888
+
889
+ def _SkipVarint(buffer, pos, end):
890
+ """Skip a varint value. Returns the new position."""
891
+ # Previously ord(buffer[pos]) raised IndexError when pos is out of range.
892
+ # With this code, ord(b'') raises TypeError. Both are handled in
893
+ # python_message.py to generate a 'Truncated message' error.
894
+ while ord(buffer[pos:pos+1].tobytes()) & 0x80:
895
+ pos += 1
896
+ pos += 1
897
+ if pos > end:
898
+ raise _DecodeError('Truncated message.')
899
+ return pos
900
+
901
+ def _SkipFixed64(buffer, pos, end):
902
+ """Skip a fixed64 value. Returns the new position."""
903
+
904
+ pos += 8
905
+ if pos > end:
906
+ raise _DecodeError('Truncated message.')
907
+ return pos
908
+
909
+
910
+ def _DecodeFixed64(buffer, pos):
911
+ """Decode a fixed64."""
912
+ new_pos = pos + 8
913
+ return (struct.unpack('<Q', buffer[pos:new_pos])[0], new_pos)
914
+
915
+
916
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value. Returns the new position."""
  (length, pos) = _DecodeVarint(buffer, pos)
  new_pos = pos + length
  if new_pos > end:
    raise _DecodeError('Truncated message.')
  return new_pos
924
+
925
+
926
def _SkipGroup(buffer, pos, end):
  """Skip an entire sub-group. Returns the new position."""
  while True:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    skipped_to = SkipField(buffer, pos, end, tag_bytes)
    if skipped_to == -1:
      # -1 from SkipField means we just consumed this group's end tag.
      return pos
    pos = skipped_to
935
+
936
+
937
def _DecodeUnknownFieldSet(buffer, pos, end_pos=None):
  """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position."""
  result = containers.UnknownFieldSet()
  # end_pos=None means "until an end-group tag" rather than a byte limit.
  while end_pos is None or pos < end_pos:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    (tag, _) = _DecodeVarint(tag_bytes, 0)
    field_number, wire_type = wire_format.UnpackTag(tag)
    if wire_type == wire_format.WIRETYPE_END_GROUP:
      break
    (data, pos) = _DecodeUnknownField(buffer, pos, wire_type)
    # pylint: disable=protected-access
    result._add(field_number, wire_type, data)
  return (result, pos)
952
+
953
+
954
def _DecodeUnknownField(buffer, pos, wire_type):
  """Decode a single unknown field. Returns (data, new position)."""
  if wire_type == wire_format.WIRETYPE_VARINT:
    return _DecodeVarint(buffer, pos)
  if wire_type == wire_format.WIRETYPE_FIXED64:
    return _DecodeFixed64(buffer, pos)
  if wire_type == wire_format.WIRETYPE_FIXED32:
    return _DecodeFixed32(buffer, pos)
  if wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
    (length, pos) = _DecodeVarint(buffer, pos)
    return (buffer[pos:pos + length].tobytes(), pos + length)
  if wire_type == wire_format.WIRETYPE_START_GROUP:
    return _DecodeUnknownFieldSet(buffer, pos)
  if wire_type == wire_format.WIRETYPE_END_GROUP:
    # An end-group tag carries no payload; pos == -1 signals the caller.
    return (0, -1)
  raise _DecodeError('Wrong wire type in tag.')
975
+
976
+
977
+ def _EndGroup(buffer, pos, end):
978
+ """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
979
+
980
+ return -1
981
+
982
+
983
+ def _SkipFixed32(buffer, pos, end):
984
+ """Skip a fixed32 value. Returns the new position."""
985
+
986
+ pos += 4
987
+ if pos > end:
988
+ raise _DecodeError('Truncated message.')
989
+ return pos
990
+
991
+
992
+ def _DecodeFixed32(buffer, pos):
993
+ """Decode a fixed32."""
994
+
995
+ new_pos = pos + 4
996
+ return (struct.unpack('<I', buffer[pos:new_pos])[0], new_pos)
997
+
998
+
999
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types. Raises an exception."""
  # Placeholder in the skipper table for wire types 6 and 7.
  raise _DecodeError('Tag had invalid wire type.')
1003
+
1004
def _FieldSkipper():
  """Constructs the SkipField function."""

  # Indexed by wire type, i.e. the low three bits of the tag varint.
  _SKIPPERS = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
  ]

  type_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
      The new position (after the tag value), or -1 if the tag is an end-group
      tag (in which case the calling loop should break).
    """
    # Varints are little-endian, so the wire type is in the first tag byte.
    wire_type = ord(tag_bytes[0:1]) & type_mask
    return _SKIPPERS[wire_type](buffer, pos, end)

  return SkipField


SkipField = _FieldSkipper()
parrot/lib/python3.10/site-packages/google/protobuf/internal/encoder.py ADDED
@@ -0,0 +1,806 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Code for encoding protocol message primitives.
9
+
10
+ Contains the logic for encoding every logical protocol field type
11
+ into one of the 5 physical wire types.
12
+
13
+ This code is designed to push the Python interpreter's performance to the
14
+ limits.
15
+
16
+ The basic idea is that at startup time, for every field (i.e. every
17
+ FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
18
+ sizer takes a value of this field's type and computes its byte size. The
19
+ encoder takes a writer function and a value. It encodes the value into byte
20
+ strings and invokes the writer function to write those strings. Typically the
21
+ writer function is the write() method of a BytesIO.
22
+
23
+ We try to do as much work as possible when constructing the writer and the
24
+ sizer rather than when calling them. In particular:
25
+ * We copy any needed global functions to local variables, so that we do not need
26
+ to do costly global table lookups at runtime.
27
+ * Similarly, we try to do any attribute lookups at startup time if possible.
28
+ * Every field's tag is encoded to bytes at startup, since it can't change at
29
+ runtime.
30
+ * Whatever component of the field size we can compute at startup, we do.
31
+ * We *avoid* sharing code if doing so would make the code slower and not sharing
32
+ does not burden us too much. For example, encoders for repeated fields do
33
+ not just call the encoders for singular fields in a loop because this would
34
+ add an extra function call overhead for every loop iteration; instead, we
35
+ manually inline the single-value encoder into the loop.
36
+ * If a Python function lacks a return statement, Python actually generates
37
+ instructions to pop the result of the last statement off the stack, push
38
+ None onto the stack, and then return that. If we really don't care what
39
+ value is returned, then we can save two instructions by returning the
40
+ result of the last statement. It looks funny but it helps.
41
+ * We assume that type and bounds checking has happened at a higher level.
42
+ """
43
+
44
+ __author__ = 'kenton@google.com (Kenton Varda)'
45
+
46
+ import struct
47
+
48
+ from google.protobuf.internal import wire_format
49
+
50
+
51
+ # This will overflow and thus become IEEE-754 "infinity". We would use
52
+ # "float('inf')" but it doesn't work on Windows pre-Python-2.6.
53
+ _POS_INF = 1e10000
54
+ _NEG_INF = -_POS_INF
55
+
56
+
57
+ def _VarintSize(value):
58
+ """Compute the size of a varint value."""
59
+ if value <= 0x7f: return 1
60
+ if value <= 0x3fff: return 2
61
+ if value <= 0x1fffff: return 3
62
+ if value <= 0xfffffff: return 4
63
+ if value <= 0x7ffffffff: return 5
64
+ if value <= 0x3ffffffffff: return 6
65
+ if value <= 0x1ffffffffffff: return 7
66
+ if value <= 0xffffffffffffff: return 8
67
+ if value <= 0x7fffffffffffffff: return 9
68
+ return 10
69
+
70
+
71
+ def _SignedVarintSize(value):
72
+ """Compute the size of a signed varint value."""
73
+ if value < 0: return 10
74
+ if value <= 0x7f: return 1
75
+ if value <= 0x3fff: return 2
76
+ if value <= 0x1fffff: return 3
77
+ if value <= 0xfffffff: return 4
78
+ if value <= 0x7ffffffff: return 5
79
+ if value <= 0x3ffffffffff: return 6
80
+ if value <= 0x1ffffffffffff: return 7
81
+ if value <= 0xffffffffffffff: return 8
82
+ if value <= 0x7fffffffffffffff: return 9
83
+ return 10
84
+
85
+
86
def _TagSize(field_number):
  """Number of bytes needed to serialize a tag with this field number."""
  # Wire type 0 is fine here: the wire type never changes the tag's size.
  return _VarintSize(wire_format.PackTag(field_number, 0))
91
+
92
+
93
+ # --------------------------------------------------------------------
94
+ # In this section we define some generic sizers. Each of these functions
95
+ # takes parameters specific to a particular field type, e.g. int32 or fixed64.
96
+ # It returns another function which in turn takes parameters specific to a
97
+ # particular field, e.g. the field number and whether it is repeated or packed.
98
+ # Look at the next section to see how these are used.
99
+
100
+
101
def _SimpleSizer(compute_value_size):
  """Builds a sizer constructor from a per-value size function
  (typically _VarintSize)."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      varint_size = _VarintSize
      def PackedFieldSize(value):
        # One tag, then a length prefix, then the concatenated payload.
        body = sum(compute_value_size(item) for item in value)
        return body + varint_size(body) + tag_size
      return PackedFieldSize
    if is_repeated:
      def RepeatedFieldSize(value):
        total = tag_size * len(value)
        for item in value:
          total += compute_value_size(item)
        return total
      return RepeatedFieldSize
    def FieldSize(value):
      return tag_size + compute_value_size(value)
    return FieldSize

  return SpecificSizer
128
+
129
+
130
def _ModifiedSizer(compute_value_size, modify_value):
  """Like _SimpleSizer, but each value is passed through modify_value
  (typically ZigZagEncode) before being sized."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      varint_size = _VarintSize
      def PackedFieldSize(value):
        body = sum(compute_value_size(modify_value(item)) for item in value)
        return body + varint_size(body) + tag_size
      return PackedFieldSize
    if is_repeated:
      def RepeatedFieldSize(value):
        total = tag_size * len(value)
        for item in value:
          total += compute_value_size(modify_value(item))
        return total
      return RepeatedFieldSize
    def FieldSize(value):
      return tag_size + compute_value_size(modify_value(value))
    return FieldSize

  return SpecificSizer
157
+
158
+
159
def _FixedSizer(value_size):
  """Like _SimpleSizer but for fixed-width fields; value_size is the
  width of one encoded value."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      varint_size = _VarintSize
      def PackedFieldSize(value):
        body = len(value) * value_size
        return body + varint_size(body) + tag_size
      return PackedFieldSize
    if is_repeated:
      per_element = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * per_element
      return RepeatedFieldSize
    # Singular fixed-width fields have a size independent of the value.
    fixed_total = value_size + tag_size
    def FieldSize(value):
      return fixed_total
    return FieldSize

  return SpecificSizer
183
+
184
+
185
+ # ====================================================================
186
+ # Here we declare a sizer constructor for each field type. Each "sizer
187
+ # constructor" is a function that takes (field_number, is_repeated, is_packed)
188
+ # as parameters and returns a sizer, which in turn takes a field value as
189
+ # a parameter and returns its encoded size.
190
+
191
+
192
# Sizer constructors, one per field type.  Varint-backed types share
# implementations; fixed-width types differ only in their byte width.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)

BoolSizer = _FixedSizer(1)
203
+
204
+
205
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field."""
  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  byte_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        # Sizes are in UTF-8 bytes, not characters.
        encoded_len = byte_len(item.encode('utf-8'))
        total += varint_size(encoded_len) + encoded_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    encoded_len = byte_len(value.encode('utf-8'))
    return tag_size + varint_size(encoded_len) + encoded_len
  return FieldSize
225
+
226
+
227
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field."""
  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  byte_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        payload = byte_len(item)
        total += varint_size(payload) + payload
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    payload = byte_len(value)
    return tag_size + varint_size(payload) + payload
  return FieldSize
247
+
248
+
249
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field."""
  # Groups are delimited by a start tag and an end tag, hence * 2.
  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        total += item.ByteSize()
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    return tag_size + value.ByteSize()
  return FieldSize
265
+
266
+
267
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field."""
  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for item in value:
        payload = item.ByteSize()
        total += varint_size(payload) + payload
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    payload = value.ByteSize()
    return tag_size + varint_size(payload) + payload
  return FieldSize
286
+
287
+
288
+ # --------------------------------------------------------------------
289
+ # MessageSet is special: it needs custom logic to compute its size properly.
290
+
291
+
292
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything except the payload and its length prefix is constant.
  fixed_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                _TagSize(3))
  varint_size = _VarintSize

  def FieldSize(value):
    payload = value.ByteSize()
    return fixed_size + varint_size(payload) + payload

  return FieldSize
312
+
313
+
314
+ # --------------------------------------------------------------------
315
+ # Map is special: it needs custom logic to compute its size properly.
316
+
317
+
318
def MapSizer(field_descriptor, is_message_map):
  """Returns a sizer for a map field."""

  # field_descriptor.message_type._concrete_class may not be initialized
  # yet, so only the descriptor itself is captured here.
  entry_type = field_descriptor.message_type
  entry_sizer = MessageSizer(field_descriptor.number, False, False)

  def FieldSize(map_value):
    total = 0
    for key in map_value:
      value = map_value[key]
      # Building a throwaway entry message mirrors what encoding will do
      # later; avoiding it would require heavy code duplication.  For
      # message maps, value.ByteSize() is also called to refresh the
      # cached size.
      entry = entry_type._concrete_class(key=key, value=value)
      total += entry_sizer(entry)
      if is_message_map:
        value.ByteSize()
    return total

  return FieldSize
342
+
343
+ # ====================================================================
344
+ # Encoders!
345
+
346
+
347
+ def _VarintEncoder():
348
+ """Return an encoder for a basic varint value (does not include tag)."""
349
+
350
+ local_int2byte = struct.Struct('>B').pack
351
+
352
+ def EncodeVarint(write, value, unused_deterministic=None):
353
+ bits = value & 0x7f
354
+ value >>= 7
355
+ while value:
356
+ write(local_int2byte(0x80|bits))
357
+ bits = value & 0x7f
358
+ value >>= 7
359
+ return write(local_int2byte(bits))
360
+
361
+ return EncodeVarint
362
+
363
+
364
+ def _SignedVarintEncoder():
365
+ """Return an encoder for a basic signed varint value (does not include
366
+ tag)."""
367
+
368
+ local_int2byte = struct.Struct('>B').pack
369
+
370
+ def EncodeSignedVarint(write, value, unused_deterministic=None):
371
+ if value < 0:
372
+ value += (1 << 64)
373
+ bits = value & 0x7f
374
+ value >>= 7
375
+ while value:
376
+ write(local_int2byte(0x80|bits))
377
+ bits = value & 0x7f
378
+ value >>= 7
379
+ return write(local_int2byte(bits))
380
+
381
+ return EncodeSignedVarint
382
+
383
+
384
# Shared encoder instances for raw (untagged) varints.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
386
+
387
+
388
def _VarintBytes(value):
  """Encode value as a varint and return the raw bytes.

  Only called at startup time, so speed is not a concern.
  """
  chunks = []
  _EncodeVarint(chunks.append, value, True)
  return b"".join(chunks)
395
+
396
+
397
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes. Only called at startup."""
  packed = wire_format.PackTag(field_number, wire_type)
  return bytes(_VarintBytes(packed))
401
+
402
+ # --------------------------------------------------------------------
403
+ # As with sizers (see above), we have a number of common encoder
404
+ # implementations.
405
+
406
+
407
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
    wire_type: The field's wire type, for encoding tags.
    encode_value: A function which encodes an individual value, e.g.
      _EncodeVarint().
    compute_value_size: A function which computes the size of an individual
      value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      encode_varint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Length prefix covers the concatenated payload only.
        payload = sum(compute_value_size(item) for item in value)
        encode_varint(write, payload, deterministic)
        for item in value:
          encode_value(write, item, deterministic)
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for item in value:
          write(tag_bytes)
          encode_value(write, item, deterministic)
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value, deterministic):
      write(tag_bytes)
      return encode_value(write, value, deterministic)
    return EncodeField

  return SpecificEncoder
446
+
447
+
448
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like _SimpleEncoder but runs modify_value (usually ZigZagEncode) on
  every value before sizing or encoding it."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      encode_varint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        payload = sum(compute_value_size(modify_value(item)) for item in value)
        encode_varint(write, payload, deterministic)
        for item in value:
          encode_value(write, modify_value(item), deterministic)
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, deterministic):
        for item in value:
          write(tag_bytes)
          encode_value(write, modify_value(item), deterministic)
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value, deterministic):
      write(tag_bytes)
      return encode_value(write, modify_value(value), deterministic)
    return EncodeField

  return SpecificEncoder
480
+
481
+
482
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
    wire_type: The field's wire type, for encoding tags.
    format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      encode_varint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        # Fixed width means the payload length is a simple multiply.
        encode_varint(write, len(value) * value_size, deterministic)
        for item in value:
          write(pack(format, item))
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for item in value:
          write(tag_bytes)
          write(pack(format, item))
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value, unused_deterministic=None):
      write(tag_bytes)
      return write(pack(format, value))
    return EncodeField

  return SpecificEncoder
518
+
519
+
520
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  Like _StructPackEncoder, but non-finite values that struct.pack rejects
  get a second, hand-written encoding attempt.

  Args:
    wire_type: The field's wire type, for encoding tags.
    format: The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    # Little-endian IEEE-754 single-precision special values.
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x80\xFF')
      elif value != value:  # Only NaN compares unequal to itself.
        write(b'\x00\x00\xC0\x7F')
      else:
        raise
  elif value_size == 8:
    # Little-endian IEEE-754 double-precision special values.
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:
        write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      encode_varint = _EncodeVarint
      def EncodePackedField(write, value, deterministic):
        write(tag_bytes)
        encode_varint(write, len(value) * value_size, deterministic)
        for item in value:
          # try/except here beats any explicit finiteness check.
          try:
            write(pack(format, item))
          except SystemError:
            EncodeNonFiniteOrRaise(write, item)
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value, unused_deterministic=None):
        for item in value:
          write(tag_bytes)
          try:
            write(pack(format, item))
          except SystemError:
            EncodeNonFiniteOrRaise(write, item)
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value, unused_deterministic=None):
      write(tag_bytes)
      try:
        write(pack(format, value))
      except SystemError:
        EncodeNonFiniteOrRaise(write, value)
    return EncodeField

  return SpecificEncoder
595
+
596
+
597
+ # ====================================================================
598
+ # Here we declare an encoder constructor for each field type. These work
599
+ # very similarly to sizer constructors, described earlier.
600
+
601
+
602
# Encoder constructors, one per field type.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# The '<' prefix pins both the byte order and the element size, so these
# formats serialize identically on every platform.
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
622
+
623
+
624
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    encode_varint = _EncodeVarint
    def EncodePackedField(write, value, deterministic):
      write(tag_bytes)
      # Each bool is exactly one byte on the wire, so len(value) is the
      # payload length.
      encode_varint(write, len(value), deterministic)
      for item in value:
        write(true_byte if item else false_byte)
    return EncodePackedField
  if is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value, unused_deterministic=None):
      for item in value:
        write(tag_bytes)
        write(true_byte if item else false_byte)
    return EncodeRepeatedField
  tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
  def EncodeField(write, value, unused_deterministic=None):
    write(tag_bytes)
    return write(true_byte if value else false_byte)
  return EncodeField
659
+
660
+
661
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  byte_len = len
  assert not is_packed
  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for item in value:
        # The length prefix counts UTF-8 bytes, not characters.
        data = item.encode('utf-8')
        write(tag)
        encode_varint(write, byte_len(data), deterministic)
        write(data)
    return EncodeRepeatedField
  def EncodeField(write, value, deterministic):
    data = value.encode('utf-8')
    write(tag)
    encode_varint(write, byte_len(data), deterministic)
    return write(data)
  return EncodeField
683
+
684
+
685
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field.

  Bytes are written as-is, length-delimited; bytes fields are never packable.
  """

  assert not is_packed
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  measure = len

  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        encode_varint(write, measure(element), deterministic)
        write(element)
    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, measure(value), deterministic)
    return write(value)
  return EncodeField
705
+
706
+
707
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field.

  Groups are delimited by START_GROUP / END_GROUP tags rather than a length
  prefix; groups are never packable.
  """

  assert not is_packed
  start = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)

  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(start)
        element._InternalSerialize(write, deterministic)
        write(end)
    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(start)
    value._InternalSerialize(write, deterministic)
    return write(end)
  return EncodeField
726
+
727
+
728
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field.

  Sub-messages are length-delimited: tag, varint byte size, then the
  serialized payload; messages are never packable.
  """

  assert not is_packed
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint

  if is_repeated:
    def EncodeRepeatedField(write, value, deterministic):
      for element in value:
        write(tag)
        encode_varint(write, element.ByteSize(), deterministic)
        element._InternalSerialize(write, deterministic)
    return EncodeRepeatedField

  def EncodeField(write, value, deterministic):
    write(tag)
    encode_varint(write, value.ByteSize(), deterministic)
    return value._InternalSerialize(write, deterministic)
  return EncodeField
747
+
748
+
749
+ # --------------------------------------------------------------------
750
+ # As before, MessageSet is special.
751
+
752
+
753
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything preceding the payload is constant for a given extension
  # number, so precompute it once: START_GROUP(1), tag+value for type_id(2),
  # and the length-delimited tag for message(3).
  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  encode_varint = _EncodeVarint

  def EncodeField(write, value, deterministic):
    write(start_bytes)
    encode_varint(write, value.ByteSize(), deterministic)
    value._InternalSerialize(write, deterministic)
    return write(end_bytes)

  return EncodeField
779
+
780
+
781
+ # --------------------------------------------------------------------
782
+ # As before, Map is special.
783
+
784
+
785
def MapEncoder(field_descriptor):
  """Returns an encoder for a map field.

  (Docstring fix: this previously said "Encoder for extensions of
  MessageSet", copy-pasted from MessageSetItemEncoder above.)

  Maps always have a wire format like this:
    message MapEntry {
      key_type key = 1;
      value_type value = 2;
    }
    repeated MapEntry map = N;

  Args:
    field_descriptor: FieldDescriptor of the map field.

  Returns:
    An EncodeField(write, value, deterministic) closure that serializes a
    dict-like map value as repeated MapEntry messages.
  """
  # Can't look at field_descriptor.message_type._concrete_class because it may
  # not have been initialized yet.
  message_type = field_descriptor.message_type
  encode_message = MessageEncoder(field_descriptor.number, False, False)

  def EncodeField(write, value, deterministic):
    # Deterministic output requires a stable key order, hence the sort.
    value_keys = sorted(value.keys()) if deterministic else value
    for key in value_keys:
      entry_msg = message_type._concrete_class(key=key, value=value[key])
      encode_message(write, entry_msg, deterministic)

  return EncodeField
parrot/lib/python3.10/site-packages/google/protobuf/internal/enum_type_wrapper.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A simple wrapper around enum types to expose utility functions.
9
+
10
+ Instances are created as properties with the same name as the enum they wrap
11
+ on proto classes. For usage, see:
12
+ reflection_test.py
13
+ """
14
+
15
+ import sys
16
+
17
+ __author__ = 'rabsatt@google.com (Kevin Rabsatt)'
18
+
19
+
20
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  # Type alias so mypy stubs can constrain enum values to an int subtype:
  #   def MyGeneratedEnum(Message):
  #     ValueType = NewType('ValueType', int)
  #     def Name(self, number: MyGeneratedEnum.ValueType) -> str
  ValueType = int

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name

  def Name(self, number):  # pylint: disable=invalid-name
    """Returns a string containing the name of an enum value."""
    try:
      return self._enum_type.values_by_number[number].name
    except KeyError:
      pass  # fall out to break exception chaining

    if not isinstance(number, int):
      raise TypeError(
          'Enum value for {} must be an int, but got {} {!r}.'.format(
              self._enum_type.name, type(number), number))
    # repr here to handle the odd case when you pass in a boolean.
    raise ValueError('Enum {} has no name defined for value {!r}'.format(
        self._enum_type.name, number))

  def Value(self, name):  # pylint: disable=invalid-name
    """Returns the value corresponding to the given enum name."""
    try:
      return self._enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise ValueError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    Returns:
      A list of strs, in the order they were defined in the .proto file.
    """
    return [descriptor.name for descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    Returns:
      A list of ints, in the order they were defined in the .proto file.
    """
    return [descriptor.number for descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    Returns:
      A list of (str, int) pairs, in the order they were defined
      in the .proto file.
    """
    return [(descriptor.name, descriptor.number)
            for descriptor in self._enum_type.values]

  def __getattr__(self, name):
    """Returns the value corresponding to the given enum name."""
    # Go through __getattribute__ to avoid infinite recursion through
    # __getattr__ itself.
    enum_type = super(EnumTypeWrapper, self).__getattribute__('_enum_type')
    try:
      return enum_type.values_by_name[name].number
    except KeyError:
      pass  # fall out to break exception chaining
    raise AttributeError('Enum {} has no value defined for name {!r}'.format(
        self._enum_type.name, name))

  def __or__(self, other):
    """Returns the union type of self and other."""
    if sys.version_info >= (3, 10):
      return type(self) | other
    raise NotImplementedError(
        'You may not use | on EnumTypes (or classes) below python 3.10'
    )
parrot/lib/python3.10/site-packages/google/protobuf/internal/extension_dict.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains _ExtensionDict class to represent extensions.
9
+ """
10
+
11
+ from google.protobuf.internal import type_checkers
12
+ from google.protobuf.descriptor import FieldDescriptor
13
+
14
+
15
def _VerifyExtensionHandle(message, extension_handle):
  """Verify that the given extension handle is valid.

  Raises KeyError (matching the dict-like _ExtensionDict access style) when
  the handle is not a FieldDescriptor, is not an extension, has no containing
  type, or extends a different message type than `message`.
  """

  if not isinstance(extension_handle, FieldDescriptor):
    raise KeyError('HasExtension() expects an extension handle, got: %s' %
                   extension_handle)

  full_name = extension_handle.full_name
  if not extension_handle.is_extension:
    raise KeyError('"%s" is not an extension.' % full_name)

  if not extension_handle.containing_type:
    raise KeyError('"%s" is missing a containing_type.'
                   % full_name)

  # Identity comparison: the handle must extend exactly this descriptor.
  if extension_handle.containing_type is not message.DESCRIPTOR:
    raise KeyError('Extension "%s" extends message type "%s", but this '
                   'message is of type "%s".' %
                   (full_name,
                    extension_handle.containing_type.full_name,
                    message.DESCRIPTOR.full_name))
35
+
36
+
37
+ # TODO: Unify error handling of "unknown extension" crap.
38
+ # TODO: Support iteritems()-style iteration over all
39
+ # extensions with the "has" bits turned on?
40
# TODO: Unify error handling of "unknown extension" crap.
# TODO: Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):

  """Dict-like container for Extension fields on proto instances.

  Note that in all cases we expect extension handles to be
  FieldDescriptors.
  """

  def __init__(self, extended_message):
    """
    Args:
      extended_message: Message instance for which we are the Extensions dict.
    """
    self._extended_message = extended_message

  def __getitem__(self, extension_handle):
    """Returns the current value of the given extension handle."""

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    result = self._extended_message._fields.get(extension_handle)
    if result is not None:
      return result

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      result = extension_handle._default_constructor(self._extended_message)
    elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      message_type = extension_handle.message_type
      if not hasattr(message_type, '_concrete_class'):
        # Lazily build the concrete message class on first use.
        # (Bug fix: this check/import was previously duplicated verbatim on
        # extension_handle.message_type, which is the same object.)
        # pylint: disable=g-import-not-at-top
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      result = message_type._concrete_class()
      try:
        result._SetListener(self._extended_message._listener_for_children)
      except ReferenceError:
        pass
    else:
      # Singular scalar -- just return the default without inserting into the
      # dict.
      return extension_handle.default_value

    # Atomically check if another thread has preempted us and, if not, swap
    # in the new object we just created. If someone has preempted us, we
    # take that object and discard ours.
    # WARNING: We are relying on setdefault() being atomic. This is true
    # in CPython but we haven't investigated others. This warning appears
    # in several other locations in this file.
    result = self._extended_message._fields.setdefault(
        extension_handle, result)

    return result

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False

    my_fields = self._extended_message.ListFields()
    other_fields = other._extended_message.ListFields()

    # Get rid of non-extension fields.
    my_fields = [field for field in my_fields if field.is_extension]
    other_fields = [field for field in other_fields if field.is_extension]

    return my_fields == other_fields

  def __ne__(self, other):
    return not self == other

  def __len__(self):
    fields = self._extended_message.ListFields()
    # Get rid of non-extension fields.
    extension_fields = [field for field in fields if field[0].is_extension]
    return len(extension_fields)

  def __hash__(self):
    raise TypeError('unhashable object')

  # Note that this is only meaningful for non-repeated, scalar extension
  # fields. Note also that we may have to call _Modified() when we do
  # successfully set a field this way, to set any necessary "has" bits in the
  # ancestors of the extended message.
  def __setitem__(self, extension_handle, value):
    """If extension_handle specifies a non-repeated, scalar extension
    field, sets the value of that field.
    """

    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
        extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
      raise TypeError(
          'Cannot assign to extension "%s" because it is a repeated or '
          'composite type.' % extension_handle.full_name)

    # It's slightly wasteful to lookup the type checker each time,
    # but we expect this to be a vanishingly uncommon case anyway.
    type_checker = type_checkers.GetTypeChecker(extension_handle)
    # pylint: disable=protected-access
    self._extended_message._fields[extension_handle] = (
        type_checker.CheckValue(value))
    self._extended_message._Modified()

  def __delitem__(self, extension_handle):
    self._extended_message.ClearExtension(extension_handle)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor, or None if unknown.
    """
    descriptor = self._extended_message.DESCRIPTOR
    extensions = descriptor.file.pool._extensions_by_name[descriptor]
    return extensions.get(name, None)

  def _FindExtensionByNumber(self, number):
    """Tries to find a known extension with the field number.

    Args:
      number: Extension field number.

    Returns:
      Extension field descriptor, or None if unknown.
    """
    descriptor = self._extended_message.DESCRIPTOR
    extensions = descriptor.file.pool._extensions_by_number[descriptor]
    return extensions.get(number, None)

  def __iter__(self):
    # Return a generator over the populated extension fields
    return (f[0] for f in self._extended_message.ListFields()
            if f[0].is_extension)

  def __contains__(self, extension_handle):
    _VerifyExtensionHandle(self._extended_message, extension_handle)

    if extension_handle not in self._extended_message._fields:
      return False

    if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
      return bool(self._extended_message._fields.get(extension_handle))

    if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
      value = self._extended_message._fields.get(extension_handle)
      # pylint: disable=protected-access
      return value is not None and value._is_present_in_parent

    return True
parrot/lib/python3.10/site-packages/google/protobuf/internal/field_mask.py ADDED
@@ -0,0 +1,310 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains FieldMask class."""
9
+
10
+ from google.protobuf.descriptor import FieldDescriptor
11
+
12
+
13
class FieldMask(object):
  """Class for FieldMask message type.

  Mixed into the generated FieldMask message class; relies on the message
  providing `self.paths` (repeated string) and `self.Clear()`.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec."""
    return ','.join(_SnakeCaseToCamelCase(path) for path in self.paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec."""
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      for segment in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(segment))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    return all(
        _IsValidPath(message_descriptor, path) for path in self.paths)

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask."""
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    _FieldMaskTree(mask).ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    union_tree = _FieldMaskTree(mask1)
    union_tree.MergeFromFieldMask(mask2)
    union_tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    base_tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      base_tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
          field if False.
      replace_repeated_field: Replace repeated field if True. Append
          elements of repeated field if False.
    """
    _FieldMaskTree(self).MergeMessage(
        source, destination, replace_message_field, replace_repeated_field)
95
+
96
+ def _IsValidPath(message_descriptor, path):
97
+ """Checks whether the path is valid for Message Descriptor."""
98
+ parts = path.split('.')
99
+ last = parts.pop()
100
+ for name in parts:
101
+ field = message_descriptor.fields_by_name.get(name)
102
+ if (field is None or
103
+ field.label == FieldDescriptor.LABEL_REPEATED or
104
+ field.type != FieldDescriptor.TYPE_MESSAGE):
105
+ return False
106
+ message_descriptor = field.message_type
107
+ return last in message_descriptor.fields_by_name
108
+
109
+
110
+ def _CheckFieldMaskMessage(message):
111
+ """Raises ValueError if message is not a FieldMask."""
112
+ message_descriptor = message.DESCRIPTOR
113
+ if (message_descriptor.name != 'FieldMask' or
114
+ message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
115
+ raise ValueError('Message {0} is not a FieldMask.'.format(
116
+ message_descriptor.full_name))
117
+
118
+
119
+ def _SnakeCaseToCamelCase(path_name):
120
+ """Converts a path name from snake_case to camelCase."""
121
+ result = []
122
+ after_underscore = False
123
+ for c in path_name:
124
+ if c.isupper():
125
+ raise ValueError(
126
+ 'Fail to print FieldMask to Json string: Path name '
127
+ '{0} must not contain uppercase letters.'.format(path_name))
128
+ if after_underscore:
129
+ if c.islower():
130
+ result.append(c.upper())
131
+ after_underscore = False
132
+ else:
133
+ raise ValueError(
134
+ 'Fail to print FieldMask to Json string: The '
135
+ 'character after a "_" must be a lowercase letter '
136
+ 'in path name {0}.'.format(path_name))
137
+ elif c == '_':
138
+ after_underscore = True
139
+ else:
140
+ result += c
141
+
142
+ if after_underscore:
143
+ raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
144
+ 'in path name {0}.'.format(path_name))
145
+ return ''.join(result)
146
+
147
+
148
+ def _CamelCaseToSnakeCase(path_name):
149
+ """Converts a field name from camelCase to snake_case."""
150
+ result = []
151
+ for c in path_name:
152
+ if c == '_':
153
+ raise ValueError('Fail to parse FieldMask: Path name '
154
+ '{0} must not contain "_"s.'.format(path_name))
155
+ if c.isupper():
156
+ result += '_'
157
+ result += c.lower()
158
+ else:
159
+ result += c
160
+ return ''.join(result)
161
+
162
+
163
+ class _FieldMaskTree(object):
164
+ """Represents a FieldMask in a tree structure.
165
+
166
+ For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
167
+ the FieldMaskTree will be:
168
+ [_root] -+- foo -+- bar
169
+ | |
170
+ | +- baz
171
+ |
172
+ +- bar --- baz
173
+ In the tree, each leaf node represents a field path.
174
+ """
175
+
176
+ __slots__ = ('_root',)
177
+
178
+ def __init__(self, field_mask=None):
179
+ """Initializes the tree by FieldMask."""
180
+ self._root = {}
181
+ if field_mask:
182
+ self.MergeFromFieldMask(field_mask)
183
+
184
+ def MergeFromFieldMask(self, field_mask):
185
+ """Merges a FieldMask to the tree."""
186
+ for path in field_mask.paths:
187
+ self.AddPath(path)
188
+
189
+ def AddPath(self, path):
190
+ """Adds a field path into the tree.
191
+
192
+ If the field path to add is a sub-path of an existing field path
193
+ in the tree (i.e., a leaf node), it means the tree already matches
194
+ the given path so nothing will be added to the tree. If the path
195
+ matches an existing non-leaf node in the tree, that non-leaf node
196
+ will be turned into a leaf node with all its children removed because
197
+ the path matches all the node's children. Otherwise, a new path will
198
+ be added.
199
+
200
+ Args:
201
+ path: The field path to add.
202
+ """
203
+ node = self._root
204
+ for name in path.split('.'):
205
+ if name not in node:
206
+ node[name] = {}
207
+ elif not node[name]:
208
+ # Pre-existing empty node implies we already have this entire tree.
209
+ return
210
+ node = node[name]
211
+ # Remove any sub-trees we might have had.
212
+ node.clear()
213
+
214
+ def ToFieldMask(self, field_mask):
215
+ """Converts the tree to a FieldMask."""
216
+ field_mask.Clear()
217
+ _AddFieldPaths(self._root, '', field_mask)
218
+
219
+ def IntersectPath(self, path, intersection):
220
+ """Calculates the intersection part of a field path with this tree.
221
+
222
+ Args:
223
+ path: The field path to calculates.
224
+ intersection: The out tree to record the intersection part.
225
+ """
226
+ node = self._root
227
+ for name in path.split('.'):
228
+ if name not in node:
229
+ return
230
+ elif not node[name]:
231
+ intersection.AddPath(path)
232
+ return
233
+ node = node[name]
234
+ intersection.AddLeafNodes(path, node)
235
+
236
+ def AddLeafNodes(self, prefix, node):
237
+ """Adds leaf nodes begin with prefix to this tree."""
238
+ if not node:
239
+ self.AddPath(prefix)
240
+ for name in node:
241
+ child_path = prefix + '.' + name
242
+ self.AddLeafNodes(child_path, node[name])
243
+
244
+ def MergeMessage(
245
+ self, source, destination,
246
+ replace_message, replace_repeated):
247
+ """Merge all fields specified by this tree from source to destination."""
248
+ _MergeMessage(
249
+ self._root, source, destination, replace_message, replace_repeated)
250
+
251
+
252
+ def _StrConvert(value):
253
+ """Converts value to str if it is not."""
254
+ # This file is imported by c extension and some methods like ClearField
255
+ # requires string for the field name. py2/py3 has different text
256
+ # type and may use unicode.
257
+ if not isinstance(value, str):
258
+ return value.encode('utf-8')
259
+ return value
260
+
261
+
262
+ def _MergeMessage(
263
+ node, source, destination, replace_message, replace_repeated):
264
+ """Merge all fields specified by a sub-tree from source to destination."""
265
+ source_descriptor = source.DESCRIPTOR
266
+ for name in node:
267
+ child = node[name]
268
+ field = source_descriptor.fields_by_name[name]
269
+ if field is None:
270
+ raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
271
+ name, source_descriptor.full_name))
272
+ if child:
273
+ # Sub-paths are only allowed for singular message fields.
274
+ if (field.label == FieldDescriptor.LABEL_REPEATED or
275
+ field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
276
+ raise ValueError('Error: Field {0} in message {1} is not a singular '
277
+ 'message field and cannot have sub-fields.'.format(
278
+ name, source_descriptor.full_name))
279
+ if source.HasField(name):
280
+ _MergeMessage(
281
+ child, getattr(source, name), getattr(destination, name),
282
+ replace_message, replace_repeated)
283
+ continue
284
+ if field.label == FieldDescriptor.LABEL_REPEATED:
285
+ if replace_repeated:
286
+ destination.ClearField(_StrConvert(name))
287
+ repeated_source = getattr(source, name)
288
+ repeated_destination = getattr(destination, name)
289
+ repeated_destination.MergeFrom(repeated_source)
290
+ else:
291
+ if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
292
+ if replace_message:
293
+ destination.ClearField(_StrConvert(name))
294
+ if source.HasField(name):
295
+ getattr(destination, name).MergeFrom(getattr(source, name))
296
+ else:
297
+ setattr(destination, name, getattr(source, name))
298
+
299
+
300
+ def _AddFieldPaths(node, prefix, field_mask):
301
+ """Adds the field paths descended from node to field_mask."""
302
+ if not node and prefix:
303
+ field_mask.paths.append(prefix)
304
+ return
305
+ for name in sorted(node):
306
+ if prefix:
307
+ child_path = prefix + '.' + name
308
+ else:
309
+ child_path = name
310
+ _AddFieldPaths(node[name], child_path, field_mask)
parrot/lib/python3.10/site-packages/google/protobuf/internal/message_listener.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Defines a listener interface for observing certain
9
+ state transitions on Message objects.
10
+
11
+ Also defines a null implementation of this interface.
12
+ """
13
+
14
+ __author__ = 'robinson@google.com (Will Robinson)'
15
+
16
+
17
class MessageListener(object):

  """Listens for modifications made to a message. Meant to be registered via
  Message._SetListener().

  Attributes:
    dirty: If True, then calling Modified() would be a no-op. This can be
      used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the parent
    message may need to be updated. This currently means either:
    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.
    Note that (a) implies (b), since new objects start out with a client cached
    size (zero). However, we document (a) explicitly because it is important.

    Modified() will *only* be called in response to one of these two events --
    not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped. Performance-
    sensitive callers should check this attribute directly before calling since
    it will be true most of the time.
    """
    # Abstract interface: concrete listeners must override.
    raise NotImplementedError
48
+
49
+
50
class NullMessageListener(object):

  """No-op MessageListener implementation."""

  def Modified(self):
    # Intentionally does nothing.
    pass
parrot/lib/python3.10/site-packages/google/protobuf/internal/python_edition_defaults.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
"""
This file contains the serialized FeatureSetDefaults object corresponding to
the Pure Python runtime. This is used for feature resolution under Editions.
"""
# Opaque wire-format bytes of a google.protobuf.FeatureSetDefaults message;
# regenerated by the protobuf build, do not edit by hand.
_PROTOBUF_INTERNAL_PYTHON_EDITION_DEFAULTS = b"\n\023\030\204\007\"\000*\014\010\001\020\002\030\002 \003(\0010\002\n\023\030\347\007\"\000*\014\010\002\020\001\030\001 \002(\0010\001\n\023\030\350\007\"\014\010\001\020\001\030\001 \002(\0010\001*\000 \346\007(\350\007"
parrot/lib/python3.10/site-packages/google/protobuf/internal/python_message.py ADDED
@@ -0,0 +1,1580 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # This code is meant to work on Python 2.4 and above only.
9
+ #
10
+ # TODO: Helpers for verbose, common checks like seeing if a
11
+ # descriptor's cpp_type is CPPTYPE_MESSAGE.
12
+
13
+ """Contains a metaclass and helper functions used to create
14
+ protocol message classes from Descriptor objects at runtime.
15
+
16
+ Recall that a metaclass is the "type" of a class.
17
+ (A class is to a metaclass what an instance is to a class.)
18
+
19
+ In this case, we use the GeneratedProtocolMessageType metaclass
20
+ to inject all the useful functionality into the classes
21
+ output by the protocol compiler at compile-time.
22
+
23
+ The upshot of all this is that the real implementation
24
+ details for ALL pure-Python protocol buffers are *here in
25
+ this file*.
26
+ """
27
+
28
+ __author__ = 'robinson@google.com (Will Robinson)'
29
+
30
+ import datetime
31
+ from io import BytesIO
32
+ import struct
33
+ import sys
34
+ import warnings
35
+ import weakref
36
+
37
+ from google.protobuf import descriptor as descriptor_mod
38
+ from google.protobuf import message as message_mod
39
+ from google.protobuf import text_format
40
+ # We use "as" to avoid name collisions with variables.
41
+ from google.protobuf.internal import api_implementation
42
+ from google.protobuf.internal import containers
43
+ from google.protobuf.internal import decoder
44
+ from google.protobuf.internal import encoder
45
+ from google.protobuf.internal import enum_type_wrapper
46
+ from google.protobuf.internal import extension_dict
47
+ from google.protobuf.internal import message_listener as message_listener_mod
48
+ from google.protobuf.internal import type_checkers
49
+ from google.protobuf.internal import well_known_types
50
+ from google.protobuf.internal import wire_format
51
+
52
+ _FieldDescriptor = descriptor_mod.FieldDescriptor
53
+ _AnyFullTypeName = 'google.protobuf.Any'
54
+ _StructFullTypeName = 'google.protobuf.Struct'
55
+ _ListValueFullTypeName = 'google.protobuf.ListValue'
56
+ _ExtensionDict = extension_dict._ExtensionDict
57
+
58
class GeneratedProtocolMessageType(type):
  """Metaclass for protocol message classes created at runtime from Descriptors.

  We add implementations for all methods described in the Message class, plus
  properties to get/set every field, and __slots__ so users cannot
  accidentally "set" nonexistent fields (which would then be silently lost on
  serialization).

  The protocol compiler uses this metaclass to create message classes at
  runtime.  Clients can also create their own classes manually:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = factory.GetPrototype(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto.foo_field = 23
    ...
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __new__(cls, name, bases, dictionary):
    """Custom allocation for runtime-generated class types.

    We override __new__ because this is apparently the only place where we can
    meaningfully set __slots__ on the class being created.

    Args:
      name: Name of the class (required by the metaclass protocol).
      bases: Base classes (should be message.Message).
      dictionary: Class dictionary; dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message type.

    Returns:
      Newly-allocated class.

    Raises:
      RuntimeError: Generated code only work with python cpp extension.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    if isinstance(descriptor, str):
      raise RuntimeError('The generated code only work with python cpp '
                         'extension, but it is using pure python runtime.')

    # If a concrete class already exists for this descriptor, reuse it:
    # creating another would break messages that already use the old class.
    # (This commonly happens in text_format.py with descriptors from a custom
    # pool; the C++ implementation uses its own PyMessageFactory for the same
    # effect.)
    existing = getattr(descriptor, '_concrete_class', None)
    if existing:
      return existing

    if descriptor.full_name in well_known_types.WKTBASES:
      bases += (well_known_types.WKTBASES[descriptor.full_name],)
    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
    _AddSlots(descriptor, dictionary)

    return super(GeneratedProtocolMessageType, cls).__new__(
        cls, name, bases, dictionary)

  def __init__(cls, name, bases, dictionary):
    """Performs the bulk of class construction.

    Adds enum getters, an __init__ method, implementations of all Message
    methods, and properties for all fields in the protocol type.

    Args:
      name: Name of the class (required by the metaclass protocol).
      bases: Base classes (should be message.Message).
      dictionary: Class dictionary; dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message type.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    # If __new__ returned an already-initialized class, skip re-initializing.
    existing_class = getattr(descriptor, '_concrete_class', None)
    if existing_class:
      assert existing_class is cls, (
          'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
          % (descriptor.full_name))
      return

    cls._message_set_decoders_by_tag = {}
    cls._fields_by_tag = {}
    if (descriptor.has_options and
        descriptor.GetOptions().message_set_wire_format):
      cls._message_set_decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
          decoder.MessageSetItemDecoder(descriptor),
          None,
      )

    # Attach helpers to each FieldDescriptor for quick lookup later on.
    for field in descriptor.fields:
      _AttachFieldHelpers(cls, field)

    if descriptor.is_extendable and hasattr(descriptor.file, 'pool'):
      for ext in descriptor.file.pool.FindAllExtensions(descriptor):
        _AttachFieldHelpers(cls, ext)

    descriptor._concrete_class = cls  # pylint: disable=protected-access
    _AddEnumValues(descriptor, cls)
    _AddInitMethod(descriptor, cls)
    _AddPropertiesForFields(descriptor, cls)
    _AddPropertiesForExtensions(descriptor, cls)
    _AddStaticMethods(cls)
    _AddMessageMethods(descriptor, cls)
    _AddPrivateHelperMethods(descriptor, cls)

    super(GeneratedProtocolMessageType, cls).__init__(name, bases, dictionary)
+
196
+
197
+ # Stateless helpers for GeneratedProtocolMessageType below.
198
+ # Outside clients should not access these directly.
199
+ #
200
+ # I opted not to make any of these methods on the metaclass, to make it more
201
+ # clear that I'm not really using any state there and to keep clients from
202
+ # thinking that they have direct access to these construction helpers.
203
+
204
+
205
+ def _PropertyName(proto_field_name):
206
+ """Returns the name of the public property attribute which
207
+ clients can use to get and (in some cases) set the value
208
+ of a protocol message field.
209
+
210
+ Args:
211
+ proto_field_name: The protocol message field name, exactly
212
+ as it appears (or would appear) in a .proto file.
213
+ """
214
+ # TODO: Escape Python keywords (e.g., yield), and test this support.
215
+ # nnorwitz makes my day by writing:
216
+ # """
217
+ # FYI. See the keyword module in the stdlib. This could be as simple as:
218
+ #
219
+ # if keyword.iskeyword(proto_field_name):
220
+ # return proto_field_name + "_"
221
+ # return proto_field_name
222
+ # """
223
+ # Kenton says: The above is a BAD IDEA. People rely on being able to use
224
+ # getattr() and setattr() to reflectively manipulate field values. If we
225
+ # rename the properties, then every such user has to also make sure to apply
226
+ # the same transformation. Note that currently if you name a field "yield",
227
+ # you can still access it just fine using getattr/setattr -- it's not even
228
+ # that cumbersome to do so.
229
+ # TODO: Remove this method entirely if/when everyone agrees with my
230
+ # position.
231
+ return proto_field_name
232
+
233
+
234
+ def _AddSlots(message_descriptor, dictionary):
235
+ """Adds a __slots__ entry to dictionary, containing the names of all valid
236
+ attributes for this message type.
237
+
238
+ Args:
239
+ message_descriptor: A Descriptor instance describing this message type.
240
+ dictionary: Class dictionary to which we'll add a '__slots__' entry.
241
+ """
242
+ dictionary['__slots__'] = ['_cached_byte_size',
243
+ '_cached_byte_size_dirty',
244
+ '_fields',
245
+ '_unknown_fields',
246
+ '_is_present_in_parent',
247
+ '_listener',
248
+ '_listener_for_children',
249
+ '__weakref__',
250
+ '_oneofs']
251
+
252
+
253
def _IsMessageSetExtension(field):
  """Returns True iff |field| is an optional message extension in a MessageSet."""
  if not field.is_extension:
    return False
  container = field.containing_type
  if not (container.has_options and
          container.GetOptions().message_set_wire_format):
    return False
  return (field.type == _FieldDescriptor.TYPE_MESSAGE and
          field.label == _FieldDescriptor.LABEL_OPTIONAL)


def _IsMapField(field):
  """Returns True iff |field| is a synthesized map field (repeated map entry)."""
  if field.type != _FieldDescriptor.TYPE_MESSAGE:
    return False
  return field.message_type._is_map_entry


def _IsMessageMapField(field):
  """Returns True iff |field| is a map whose value entry is itself a message."""
  value_type = field.message_type.fields_by_name['value']
  return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
+ return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE
269
+
270
def _AttachFieldHelpers(cls, field_descriptor):
  """Registers tag lookups and the default-value constructor for one field."""
  is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
  field_descriptor._default_constructor = _DefaultValueConstructorForField(
      field_descriptor
  )

  def RegisterTag(wiretype, is_packed):
    tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
    cls._fields_by_tag[tag_bytes] = (field_descriptor, is_packed)

  RegisterTag(
      type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False
  )

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # For wire compatibility with fields that later add packed = true, accept
    # packed values regardless of the field's current options.
    RegisterTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
+ AddFieldByTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
288
+
289
+
290
def _MaybeAddEncoder(cls, field_descriptor):
  """Lazily attaches _encoder and _sizer to |field_descriptor| (idempotent)."""
  if hasattr(field_descriptor, '_encoder'):
    return
  is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
  is_map_entry = _IsMapField(field_descriptor)
  is_packed = field_descriptor.is_packed

  if is_map_entry:
    field_encoder = encoder.MapEncoder(field_descriptor)
    sizer = encoder.MapSizer(field_descriptor,
                             _IsMessageMapField(field_descriptor))
  elif _IsMessageSetExtension(field_descriptor):
    field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
    sizer = encoder.MessageSetItemSizer(field_descriptor.number)
  else:
    # Regular field: dispatch on the wire type tables.
    field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)
    sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)

  field_descriptor._sizer = sizer
  field_descriptor._encoder = field_encoder
+ field_descriptor._encoder = field_encoder
312
+
313
+
314
def _MaybeAddDecoder(cls, field_descriptor):
  """Lazily attaches the _decoders table to |field_descriptor| (idempotent).

  The table maps is_packed (bool) to the decoder callable for that encoding.
  """
  if hasattr(field_descriptor, '_decoders'):
    return

  is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
  is_map_entry = _IsMapField(field_descriptor)
  helper_decoders = {}

  def BuildDecoder(is_packed):
    decode_type = field_descriptor.type
    if (decode_type == _FieldDescriptor.TYPE_ENUM and
        not field_descriptor.enum_type.is_closed):
      # Open enums decode as plain int32 so unknown values are preserved.
      decode_type = _FieldDescriptor.TYPE_INT32

    oneof_descriptor = None
    if field_descriptor.containing_oneof is not None:
      oneof_descriptor = field_descriptor

    if is_map_entry:
      field_decoder = decoder.MapDecoder(
          field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
          _IsMessageMapField(field_descriptor))
    elif decode_type == _FieldDescriptor.TYPE_STRING:
      field_decoder = decoder.StringDecoder(
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)
    elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor)
    else:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          # pylint: disable=protected-access
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)

    helper_decoders[is_packed] = field_decoder

  BuildDecoder(False)

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # For wire compatibility with fields that later add packed = true, accept
    # packed values regardless of the field's current options.
    BuildDecoder(True)

  field_descriptor._decoders = helper_decoders
+ field_descriptor._decoders = helper_decoders
364
+
365
+
366
+ def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
367
+ extensions = descriptor.extensions_by_name
368
+ for extension_name, extension_field in extensions.items():
369
+ assert extension_name not in dictionary
370
+ dictionary[extension_name] = extension_field
371
+
372
+
373
def _AddEnumValues(descriptor, cls):
  """Sets class-level attributes for all enum fields defined in this message.

  Also exports, per enum type, a class-level wrapper object that can name
  enum values.

  Args:
    descriptor: Descriptor object for this message type.
    cls: Class we're constructing for this message type.
  """
  for enum_type in descriptor.enum_types:
    setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
    for enum_value in enum_type.values:
      setattr(cls, enum_value.name, enum_value.number)
+ setattr(cls, enum_value.name, enum_value.number)
386
+
387
+
388
def _GetInitializeDefaultForMap(field):
  """Returns a factory producing the default container for a map field.

  Raises:
    ValueError: if |field| is not repeated (maps are always repeated entries).
  """
  if field.label != _FieldDescriptor.LABEL_REPEATED:
    raise ValueError('map_entry set on non-repeated field %s' % (
        field.name))
  fields_by_name = field.message_type.fields_by_name
  key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])
  value_field = fields_by_name['value']

  if _IsMessageMapField(field):
    def MakeMessageMapDefault(message):
      return containers.MessageMap(
          message._listener_for_children, value_field.message_type,
          key_checker, field.message_type)
    return MakeMessageMapDefault

  value_checker = type_checkers.GetTypeChecker(value_field)
  def MakePrimitiveMapDefault(message):
    return containers.ScalarMap(
        message._listener_for_children, key_checker, value_checker,
        field.message_type)
  return MakePrimitiveMapDefault
+ return MakePrimitiveMapDefault
409
+
410
def _DefaultValueConstructorForField(field):
  """Returns a one-argument factory producing a default value for |field|.

  Args:
    field: FieldDescriptor object for this field.

  The returned function takes one argument -- the Message instance containing
  this field (or a weakref proxy of it) -- and returns a fresh default value.
  That value may refer back to |message| via a weak reference.
  """
  if _IsMapField(field):
    return _GetInitializeDefaultForMap(field)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    if field.has_default_value and field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have been
      # set.  (Depends on the order in which we initialize the classes.)
      def MakeRepeatedMessageDefault(message):
        return containers.RepeatedCompositeFieldContainer(
            message._listener_for_children, field.message_type)
      return MakeRepeatedMessageDefault

    type_checker = type_checkers.GetTypeChecker(field)
    def MakeRepeatedScalarDefault(message):
      return containers.RepeatedScalarFieldContainer(
          message._listener_for_children, type_checker)
    return MakeRepeatedScalarDefault

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    message_type = field.message_type
    def MakeSubMessageDefault(message):
      # _concrete_class may not yet be initialized; force it via the factory.
      if not hasattr(message_type, '_concrete_class'):
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      result = message_type._concrete_class()
      result._SetListener(
          _OneofListener(message, field)
          if field.containing_oneof is not None
          else message._listener_for_children)
      return result
    return MakeSubMessageDefault

  def MakeScalarDefault(message):
    # TODO: This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return field.default_value
  return MakeScalarDefault
+ return MakeScalarDefault
466
+
467
+
468
+ def _ReraiseTypeErrorWithFieldName(message_name, field_name):
469
+ """Re-raise the currently-handled TypeError with the field name added."""
470
+ exc = sys.exc_info()[1]
471
+ if len(exc.args) == 1 and type(exc) is TypeError:
472
+ # simple TypeError; add field name to exception message
473
+ exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))
474
+
475
+ # re-raise possibly-amended exception with original traceback:
476
+ raise exc.with_traceback(sys.exc_info()[2])
477
+
478
+
479
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""

  def _GetIntegerEnumValue(enum_type, value):
    """Convert a string or integer enum value to an integer.

    Strings are looked up by name in |enum_type|; any other value is returned
    as-is (no conversion or bounds-checking is done).
    """
    if isinstance(value, str):
      try:
        return enum_type.values_by_name[value].number
      except KeyError:
        raise ValueError('Enum type %s: unknown label "%s"' % (
            enum_type.full_name, value))
    return value

  def init(self, **kwargs):
    self._cached_byte_size = 0
    self._cached_byte_size_dirty = len(kwargs) > 0
    self._fields = {}
    # Maps each oneof descriptor to the descriptor of the currently set field
    # in that oneof.
    self._oneofs = {}

    # _unknown_fields is () when empty for efficiency, and will be turned into
    # a list if fields are added.
    self._unknown_fields = ()
    self._is_present_in_parent = False
    self._listener = message_listener_mod.NullMessageListener()
    self._listener_for_children = _Listener(self)
    for field_name, field_value in kwargs.items():
      field = _GetFieldByName(message_descriptor, field_name)
      if field is None:
        raise TypeError('%s() got an unexpected keyword argument "%s"' %
                        (message_descriptor.name, field_name))
      if field_value is None:
        # field=None is the same as no field at all.
        continue
      if field.label == _FieldDescriptor.LABEL_REPEATED:
        field_copy = field._default_constructor(self)
        if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:  # Composite
          if _IsMapField(field):
            if _IsMessageMapField(field):
              for key in field_value:
                field_copy[key].MergeFrom(field_value[key])
            else:
              field_copy.update(field_value)
          else:
            for val in field_value:
              if isinstance(val, dict):
                field_copy.add(**val)
              else:
                field_copy.add().MergeFrom(val)
        else:  # Scalar
          if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
            field_value = [_GetIntegerEnumValue(field.enum_type, val)
                           for val in field_value]
          field_copy.extend(field_value)
        self._fields[field] = field_copy
      elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        field_copy = field._default_constructor(self)
        new_val = None
        if isinstance(field_value, message_mod.Message):
          new_val = field_value
        elif isinstance(field_value, dict):
          if field.message_type.full_name == _StructFullTypeName:
            field_copy.Clear()
            if len(field_value) == 1 and 'fields' in field_value:
              try:
                field_copy.update(field_value)
              except:
                # Fall back to init normal message field
                field_copy.Clear()
                new_val = field.message_type._concrete_class(**field_value)
            else:
              field_copy.update(field_value)
          else:
            new_val = field.message_type._concrete_class(**field_value)
        elif hasattr(field_copy, '_internal_assign'):
          field_copy._internal_assign(field_value)
        else:
          raise TypeError(
              'Message field {0}.{1} must be initialized with a '
              'dict or instance of same class, got {2}.'.format(
                  message_descriptor.name,
                  field_name,
                  type(field_value).__name__,
              )
          )

        if new_val:
          try:
            field_copy.MergeFrom(new_val)
          except TypeError:
            _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)
        self._fields[field] = field_copy
      else:
        if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
          field_value = _GetIntegerEnumValue(field.enum_type, field_value)
        try:
          setattr(self, field_name, field_value)
        except TypeError:
          _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name)

  init.__module__ = None
  init.__doc__ = None
  cls.__init__ = init
+ cls.__init__ = init
588
+
589
+
590
+ def _GetFieldByName(message_descriptor, field_name):
591
+ """Returns a field descriptor by field name.
592
+
593
+ Args:
594
+ message_descriptor: A Descriptor describing all fields in message.
595
+ field_name: The name of the field to retrieve.
596
+ Returns:
597
+ The field descriptor associated with the field name.
598
+ """
599
+ try:
600
+ return message_descriptor.fields_by_name[field_name]
601
+ except KeyError:
602
+ raise ValueError('Protocol message %s has no "%s" field.' %
603
+ (message_descriptor.name, field_name))
604
+
605
+
606
def _AddPropertiesForFields(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  for field in descriptor.fields:
    _AddPropertiesForField(field, cls)

  if descriptor.is_extendable:
    # _ExtensionDict is just a stateless adaptor, so a fresh instance is
    # allocated on every access.
    cls.Extensions = property(lambda self: _ExtensionDict(self))
+ cls.Extensions = property(lambda self: _ExtensionDict(self))
615
+
616
+
617
def _AddPropertiesForField(field, cls):
  """Adds a public property plus the *_FIELD_NUMBER constant for one field.

  Clients can use the property to get and (for non-repeated scalar fields)
  directly set the value of a protocol message field.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # Catch it if we add other types that we should handle specially here.
  assert _FieldDescriptor.MAX_CPPTYPE == 10

  constant_name = field.name.upper() + '_FIELD_NUMBER'
  setattr(cls, constant_name, field.number)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    _AddPropertiesForRepeatedField(field, cls)
  elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    _AddPropertiesForNonRepeatedCompositeField(field, cls)
  else:
    _AddPropertiesForNonRepeatedScalarField(field, cls)
+ _AddPropertiesForNonRepeatedScalarField(field, cls)
640
+
641
+
642
+ class _FieldProperty(property):
643
+ __slots__ = ('DESCRIPTOR',)
644
+
645
+ def __init__(self, descriptor, getter, setter, doc):
646
+ property.__init__(self, getter, setter, doc=doc)
647
+ self.DESCRIPTOR = descriptor
648
+
649
+
650
def _AddPropertiesForRepeatedField(field, cls):
  """Adds a read-only property exposing a repeated field's container.

  The property value is a RepeatedScalarFieldContainer or
  RepeatedCompositeFieldContainer.  When clients add values through it,
  type-checking is performed for repeated scalar fields and any necessary
  "has" bits are set as a side-effect.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created.  If someone has preempted us, we
      # take that object and discard ours.
      # WARNING: We are relying on setdefault() being atomic.  This is true
      # in CPython but we haven't investigated others.  This warning appears
      # in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # The setter exists only to raise a more helpful error message.
  def setter(self, new_value):
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % proto_field_name)

  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
692
+
693
+
694
def _AddPropertiesForNonRepeatedScalarField(field, cls):
  """Adds a read/write property for a nonrepeated, scalar field.

  When the client sets the value through the property, all necessary "has"
  bits are set as a side-effect and type-checking is performed.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)
  type_checker = type_checkers.GetTypeChecker(field)
  default_value = field.default_value

  def getter(self):
    # TODO: This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return self._fields.get(field, default_value)
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  def field_setter(self, new_value):
    # pylint: disable=protected-access
    # Testing the value for truthiness captures all of the proto3 defaults
    # (0, 0.0, enum 0, and False).
    try:
      new_value = type_checker.CheckValue(new_value)
    except TypeError as e:
      raise TypeError(
          'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e))
    if not field.has_presence and not new_value:
      self._fields.pop(field, None)
    else:
      self._fields[field] = new_value
    # Check _cached_byte_size_dirty inline to improve performance, since
    # scalar setters are called frequently.
    if not self._cached_byte_size_dirty:
      self._Modified()

  if field.containing_oneof:
    # Oneof members must also update the oneof bookkeeping on assignment.
    def setter(self, new_value):
      field_setter(self, new_value)
      self._UpdateOneofState(field)
  else:
    setter = field_setter

  setter.__module__ = None
  setter.__doc__ = 'Setter for %s.' % proto_field_name

  # Add a property to encapsulate the getter/setter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
+ setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
748
+
749
+
750
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
  """Adds a public property for a nonrepeated, composite protocol message field.
  A composite field is a "group" or "message" field.

  Clients can use this property to get the value of the field, but cannot
  assign to the property directly.

  Args:
    field: A FieldDescriptor for this field.
    cls: The class we're constructing.
  """
  # TODO: Remove duplication with similar method
  # for non-repeated scalars.
  proto_field_name = field.name
  property_name = _PropertyName(proto_field_name)

  def getter(self):
    field_value = self._fields.get(field)
    if field_value is None:
      # Construct a new object to represent this field.
      field_value = field._default_constructor(self)

      # Atomically check if another thread has preempted us and, if not, swap
      # in the new object we just created. If someone has preempted us, we
      # take that object and discard ours.
      # WARNING: We are relying on setdefault() being atomic. This is true
      # in CPython but we haven't investigated others. This warning appears
      # in several other locations in this file.
      field_value = self._fields.setdefault(field, field_value)
    return field_value
  getter.__module__ = None
  getter.__doc__ = 'Getter for %s.' % proto_field_name

  # We define a setter just so we can throw an exception with a more
  # helpful error message.  A handful of well-known types are the exception:
  # they accept assignment and convert the value in place.
  def setter(self, new_value):
    if field.message_type.full_name == 'google.protobuf.Timestamp':
      getter(self)
      self._fields[field].FromDatetime(new_value)
    elif field.message_type.full_name == 'google.protobuf.Duration':
      getter(self)
      self._fields[field].FromTimedelta(new_value)
    elif field.message_type.full_name == _StructFullTypeName:
      getter(self)
      self._fields[field].Clear()
      self._fields[field].update(new_value)
    elif field.message_type.full_name == _ListValueFullTypeName:
      getter(self)
      self._fields[field].Clear()
      self._fields[field].extend(new_value)
    else:
      raise AttributeError(
          'Assignment not allowed to composite field '
          '"%s" in protocol message object.' % proto_field_name
      )

  # Add a property to encapsulate the getter.
  doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
  setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc))
809
+
810
+
811
def _AddPropertiesForExtensions(descriptor, cls):
  """Adds properties for all fields in this protocol message type."""
  for extension_name, extension_field in descriptor.extensions_by_name.items():
    setattr(cls, extension_name.upper() + '_FIELD_NUMBER',
            extension_field.number)

  # TODO: Migrate all users of these attributes to functions like
  # pool.FindExtensionByNumber(descriptor).
  if descriptor.file is not None:
    # TODO: Use cls.MESSAGE_FACTORY.pool when available.
    pool = descriptor.file.pool
823
+
824
def _AddStaticMethods(cls):
  """Installs static helper methods (currently just FromString) on cls."""
  def FromString(s):
    new_message = cls()
    new_message.MergeFromString(s)
    return new_message
  cls.FromString = staticmethod(FromString)
830
+
831
+
832
def _IsPresent(item):
  """Given a (FieldDescriptor, value) tuple from _fields, return true if the
  value should be included in the list returned by ListFields()."""
  field_descriptor, value = item
  if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
    # Repeated fields count as present only when non-empty.
    return bool(value)
  if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    # Submessages track their own presence bit.
    return value._is_present_in_parent
  return True
842
+
843
+
844
def _AddListFieldsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ListFields()."""

  def ListFields(self):
    # Present (field, value) pairs, ordered by field number.
    return sorted(
        (item for item in self._fields.items() if _IsPresent(item)),
        key=lambda item: item[0].number)

  cls.ListFields = ListFields
853
+
854
+
855
def _AddHasFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs HasField()."""

  # Precompute the names HasField() accepts: singular fields that track
  # presence, plus oneof names.
  hassable_fields = {}
  for field in message_descriptor.fields:
    if field.label == _FieldDescriptor.LABEL_REPEATED:
      continue
    # For proto3, only submessages and fields inside a oneof have presence.
    if not field.has_presence:
      continue
    hassable_fields[field.name] = field

  # Has methods are supported for oneof descriptors.
  for oneof in message_descriptor.oneofs:
    hassable_fields[oneof.name] = oneof

  def HasField(self, field_name):
    try:
      field = hassable_fields[field_name]
    except KeyError as exc:
      raise ValueError('Protocol message %s has no non-repeated field "%s" '
                       'nor has presence is not available for this field.' % (
                           message_descriptor.full_name, field_name)) from exc

    if isinstance(field, descriptor_mod.OneofDescriptor):
      # For a oneof name, report the presence of whichever member is active.
      try:
        return HasField(self, self._oneofs[field].name)
      except KeyError:
        return False
    else:
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        # Submessages are present only if their own presence bit is set.
        value = self._fields.get(field)
        return value is not None and value._is_present_in_parent
      else:
        return field in self._fields

  cls.HasField = HasField
892
+
893
+
894
def _AddClearFieldMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ClearField()."""
  def ClearField(self, field_name):
    try:
      field = message_descriptor.fields_by_name[field_name]
    except KeyError:
      try:
        field = message_descriptor.oneofs_by_name[field_name]
        if field in self._oneofs:
          # Clearing a oneof by name clears its currently-active member.
          field = self._oneofs[field]
        else:
          return
      except KeyError:
        raise ValueError('Protocol message %s has no "%s" field.' %
                         (message_descriptor.name, field_name))

    if field in self._fields:
      # To match the C++ implementation, we need to invalidate iterators
      # for map fields when ClearField() happens.
      if hasattr(self._fields[field], 'InvalidateIterators'):
        self._fields[field].InvalidateIterators()

      # Note: If the field is a sub-message, its listener will still point
      # at us.  That's fine, because the worst than can happen is that it
      # will call _Modified() and invalidate our byte size.  Big deal.
      del self._fields[field]

      if self._oneofs.get(field.containing_oneof, None) is field:
        del self._oneofs[field.containing_oneof]

    # Always call _Modified() -- even if nothing was changed, this is
    # a mutating method, and thus calling it should cause the field to become
    # present in the parent message.
    self._Modified()

  cls.ClearField = ClearField
930
+
931
+
932
def _AddClearExtensionMethod(cls):
  """Helper for _AddMessageMethods(): installs ClearExtension()."""
  def ClearExtension(self, field_descriptor):
    extension_dict._VerifyExtensionHandle(self, field_descriptor)

    # Similar to ClearField(), above.
    if field_descriptor not in self._fields:
      return
    del self._fields[field_descriptor]
    self._Modified()
  cls.ClearExtension = ClearExtension
942
+
943
+
944
def _AddHasExtensionMethod(cls):
  """Helper for _AddMessageMethods(): installs HasExtension()."""
  def HasExtension(self, field_descriptor):
    extension_dict._VerifyExtensionHandle(self, field_descriptor)
    if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED:
      raise KeyError('"%s" is repeated.' % field_descriptor.full_name)

    if field_descriptor.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE:
      return field_descriptor in self._fields
    # Submessages are present only when their own presence bit is set.
    value = self._fields.get(field_descriptor)
    return value is not None and value._is_present_in_parent
  cls.HasExtension = HasExtension
957
+
958
def _InternalUnpackAny(msg):
  """Unpacks Any message and returns the unpacked message.

  This internal method is different from public Any Unpack method which takes
  the target message as argument. _InternalUnpackAny method does not have
  target message type and need to find the message type in descriptor pool.

  Args:
    msg: An Any message to be unpacked.

  Returns:
    The unpacked message, or None if the Any has no type_url or the type is
    not found in the pool.
  """
  # TODO: Don't use the factory of generated messages.
  # To make Any work with custom factories, use the message factory of the
  # parent message.
  # pylint: disable=g-import-not-at-top
  from google.protobuf import symbol_database
  factory = symbol_database.Default()

  type_url = msg.type_url

  if not type_url:
    return None

  # TODO: For now we just strip the hostname.  Better logic will be
  # required.
  type_name = type_url.split('/')[-1]
  descriptor = factory.pool.FindMessageTypeByName(type_name)

  if descriptor is None:
    return None

  message_class = factory.GetPrototype(descriptor)
  message = message_class()

  message.ParseFromString(msg.value)
  return message
996
+
997
+
998
def _AddEqualsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs __eq__()."""
  def __eq__(self, other):
    # Struct and ListValue compare equal to plain dicts / lists with the
    # same contents.
    if self.DESCRIPTOR.full_name == _ListValueFullTypeName and isinstance(
        other, list
    ):
      return self._internal_compare(other)
    if self.DESCRIPTOR.full_name == _StructFullTypeName and isinstance(
        other, dict
    ):
      return self._internal_compare(other)

    if (not isinstance(other, message_mod.Message) or
        other.DESCRIPTOR != self.DESCRIPTOR):
      return NotImplemented

    if self is other:
      return True

    # Any messages compare by their unpacked payloads when both unpack
    # successfully.
    if self.DESCRIPTOR.full_name == _AnyFullTypeName:
      any_a = _InternalUnpackAny(self)
      any_b = _InternalUnpackAny(other)
      if any_a and any_b:
        return any_a == any_b

    if not self.ListFields() == other.ListFields():
      return False

    # TODO: Fix UnknownFieldSet to consider MessageSet extensions,
    # then use it for the comparison.
    unknown_fields = list(self._unknown_fields)
    unknown_fields.sort()
    other_unknown_fields = list(other._unknown_fields)
    other_unknown_fields.sort()
    return unknown_fields == other_unknown_fields

  cls.__eq__ = __eq__
1035
+
1036
+
1037
def _AddStrMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a text-format __str__."""
  def __str__(self):  # pylint: disable=invalid-name
    return text_format.MessageToString(self)

  cls.__str__ = __str__
1042
+
1043
+
1044
def _AddReprMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a text-format __repr__."""
  def __repr__(self):  # pylint: disable=invalid-name
    return text_format.MessageToString(self)

  cls.__repr__ = __repr__
1049
+
1050
+
1051
def _AddUnicodeMethod(unused_message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs a legacy __unicode__."""

  def __unicode__(self):  # pylint: disable=invalid-name
    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')

  cls.__unicode__ = __unicode__
1057
+
1058
+
1059
def _AddContainsMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs an appropriate __contains__.

  Struct behaves like a dict (key containment) and ListValue like a list
  (value containment); every other message type delegates to HasField().

  Args:
    message_descriptor: The Descriptor for this message type.
    cls: The class we're constructing.
  """
  # Use the module-level full-name constants rather than repeating the
  # literals, consistent with the other well-known-type checks in this file.
  if message_descriptor.full_name == _StructFullTypeName:
    def __contains__(self, key):
      return key in self.fields
  elif message_descriptor.full_name == _ListValueFullTypeName:
    def __contains__(self, value):
      return value in self.items()
  else:
    def __contains__(self, field):
      return self.HasField(field)

  cls.__contains__ = __contains__
1072
+
1073
+
1074
def _BytesForNonRepeatedElement(value, field_number, field_type):
  """Returns the number of bytes needed to serialize a non-repeated element.
  The returned byte count includes space for tag information and any
  other additional space associated with serializing value.

  Args:
    value: Value we're serializing.
    field_number: Field number of this value.  (Since the field number
      is stored as part of a varint-encoded tag, this has an impact
      on the total bytes required to serialize the value).
    field_type: The type of the field.  One of the TYPE_* constants
      within FieldDescriptor.
  """
  try:
    compute_size = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
    return compute_size(field_number, value)
  except KeyError:
    raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
1092
+
1093
+
1094
def _AddByteSizeMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs ByteSize()."""

  def ByteSize(self):
    # Reuse the cached size unless the message was modified since the last
    # computation.
    if not self._cached_byte_size_dirty:
      return self._cached_byte_size

    size = 0
    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      size = key_field._sizer(self.key)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      size += value_field._sizer(self.value)
    else:
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        size += field_descriptor._sizer(field_value)
      # Unknown fields contribute their raw tag + payload bytes.
      for tag_bytes, value_bytes in self._unknown_fields:
        size += len(tag_bytes) + len(value_bytes)

    self._cached_byte_size = size
    self._cached_byte_size_dirty = False
    self._listener_for_children.dirty = False
    return size

  cls.ByteSize = ByteSize
1124
+
1125
+
1126
def _AddSerializeToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs SerializeToString()."""

  def SerializeToString(self, **kwargs):
    # Check if the message has all of its required fields set.
    if self.IsInitialized():
      return self.SerializePartialToString(**kwargs)
    raise message_mod.EncodeError(
        'Message %s is missing required fields: %s' % (
            self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))

  cls.SerializeToString = SerializeToString
1137
+
1138
+
1139
def _AddSerializePartialToStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs SerializePartialToString()
  and the internal _InternalSerialize used by encoders."""

  def SerializePartialToString(self, **kwargs):
    out = BytesIO()
    self._InternalSerialize(out.write, **kwargs)
    return out.getvalue()
  cls.SerializePartialToString = SerializePartialToString

  def InternalSerialize(self, write_bytes, deterministic=None):
    if deterministic is None:
      deterministic = (
          api_implementation.IsPythonDefaultSerializationDeterministic())
    else:
      deterministic = bool(deterministic)

    descriptor = self.DESCRIPTOR
    if descriptor._is_map_entry:
      # Fields of map entry should always be serialized.
      key_field = descriptor.fields_by_name['key']
      _MaybeAddEncoder(cls, key_field)
      key_field._encoder(write_bytes, self.key, deterministic)
      value_field = descriptor.fields_by_name['value']
      _MaybeAddEncoder(cls, value_field)
      value_field._encoder(write_bytes, self.value, deterministic)
    else:
      for field_descriptor, field_value in self.ListFields():
        _MaybeAddEncoder(cls, field_descriptor)
        field_descriptor._encoder(write_bytes, field_value, deterministic)
      # Unknown fields are written back verbatim.
      for tag_bytes, value_bytes in self._unknown_fields:
        write_bytes(tag_bytes)
        write_bytes(value_bytes)
  cls._InternalSerialize = InternalSerialize
1172
+
1173
+
1174
def _AddMergeFromStringMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs MergeFromString() and the
  internal _InternalParse used by decoders."""
  def MergeFromString(self, serialized):
    serialized = memoryview(serialized)
    length = len(serialized)
    try:
      if self._InternalParse(serialized, 0, length) != length:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise message_mod.DecodeError('Unexpected end-group tag.')
    except (IndexError, TypeError):
      # Now ord(buf[p:p+1]) == ord('') gets TypeError.
      raise message_mod.DecodeError('Truncated message.')
    except struct.error as e:
      raise message_mod.DecodeError(e)
    return length   # Return this for legacy reasons.
  cls.MergeFromString = MergeFromString

  # Bind frequently-used helpers to locals so InternalParse's hot loop does
  # fast local lookups.
  local_ReadTag = decoder.ReadTag
  local_SkipField = decoder.SkipField
  fields_by_tag = cls._fields_by_tag
  message_set_decoders_by_tag = cls._message_set_decoders_by_tag

  def InternalParse(self, buffer, pos, end):
    """Create a message from serialized bytes.

    Args:
      self: Message, instance of the proto message object.
      buffer: memoryview of the serialized data.
      pos: int, position to start in the serialized data.
      end: int, end position of the serialized data.

    Returns:
      Message object.
    """
    # Guard against internal misuse, since this function is called internally
    # quite extensively, and its easy to accidentally pass bytes.
    assert isinstance(buffer, memoryview)
    self._Modified()
    field_dict = self._fields
    while pos != end:
      (tag_bytes, new_pos) = local_ReadTag(buffer, pos)
      field_decoder, field_des = message_set_decoders_by_tag.get(
          tag_bytes, (None, None)
      )
      if field_decoder:
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        continue
      field_des, is_packed = fields_by_tag.get(tag_bytes, (None, None))
      if field_des is None:
        # Unrecognized tag: record it as an unknown field.
        if not self._unknown_fields:  # pylint: disable=protected-access
          self._unknown_fields = []  # pylint: disable=protected-access
        # pylint: disable=protected-access
        (tag, _) = decoder._DecodeVarint(tag_bytes, 0)
        field_number, wire_type = wire_format.UnpackTag(tag)
        if field_number == 0:
          raise message_mod.DecodeError('Field number 0 is illegal.')
        # TODO: remove old_pos.
        old_pos = new_pos
        (data, new_pos) = decoder._DecodeUnknownField(
            buffer, new_pos, wire_type)  # pylint: disable=protected-access
        if new_pos == -1:
          return pos
        # TODO: remove _unknown_fields.
        new_pos = local_SkipField(buffer, old_pos, end, tag_bytes)
        if new_pos == -1:
          return pos
        self._unknown_fields.append(
            (tag_bytes, buffer[old_pos:new_pos].tobytes()))
        pos = new_pos
      else:
        _MaybeAddDecoder(cls, field_des)
        field_decoder = field_des._decoders[is_packed]
        pos = field_decoder(buffer, new_pos, end, self, field_dict)
        if field_des.containing_oneof:
          self._UpdateOneofState(field_des)
    return pos
  cls._InternalParse = InternalParse
1252
+
1253
+
1254
def _AddIsInitializedMethod(message_descriptor, cls):
  """Adds the IsInitialized and FindInitializationError methods to the
  protocol message class."""

  required_fields = [field for field in message_descriptor.fields
                     if field.label == _FieldDescriptor.LABEL_REQUIRED]

  def IsInitialized(self, errors=None):
    """Checks if all required fields of a message are set.

    Args:
      errors:  A list which, if provided, will be populated with the field
               paths of all missing required fields.

    Returns:
      True iff the specified message has all required fields set.
    """

    # Performance is critical so we avoid HasField() and ListFields().

    for field in required_fields:
      if (field not in self._fields or
          (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
           not self._fields[field]._is_present_in_parent)):
        if errors is not None:
          errors.extend(self.FindInitializationErrors())
        return False

    for field, value in list(self._fields.items()):  # dict can change size!
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.label == _FieldDescriptor.LABEL_REPEATED:
          if (field.message_type._is_map_entry):
            continue
          for element in value:
            if not element.IsInitialized():
              if errors is not None:
                errors.extend(self.FindInitializationErrors())
              return False
        elif value._is_present_in_parent and not value.IsInitialized():
          if errors is not None:
            errors.extend(self.FindInitializationErrors())
          return False

    return True

  cls.IsInitialized = IsInitialized

  def FindInitializationErrors(self):
    """Finds required fields which are not initialized.

    Returns:
      A list of strings.  Each string is a path to an uninitialized field from
      the top-level message, e.g. "foo.bar[5].baz".
    """

    errors = []  # simplify things

    for field in required_fields:
      if not self.HasField(field.name):
        errors.append(field.name)

    for field, value in self.ListFields():
      if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
        if field.is_extension:
          name = '(%s)' % field.full_name
        else:
          name = field.name

        if _IsMapField(field):
          if _IsMessageMapField(field):
            for key in value:
              element = value[key]
              prefix = '%s[%s].' % (name, key)
              sub_errors = element.FindInitializationErrors()
              errors += [prefix + error for error in sub_errors]
          else:
            # ScalarMaps can't have any initialization errors.
            pass
        elif field.label == _FieldDescriptor.LABEL_REPEATED:
          for i in range(len(value)):
            element = value[i]
            prefix = '%s[%d].' % (name, i)
            sub_errors = element.FindInitializationErrors()
            errors += [prefix + error for error in sub_errors]
        else:
          prefix = name + '.'
          sub_errors = value.FindInitializationErrors()
          errors += [prefix + error for error in sub_errors]

    return errors

  cls.FindInitializationErrors = FindInitializationErrors
1346
+
1347
+
1348
def _FullyQualifiedClassName(klass):
  """Returns 'module.QualName' for klass, omitting builtin module prefixes."""
  qualname = getattr(klass, '__qualname__', klass.__name__)
  module = klass.__module__
  if module in (None, 'builtins', '__builtin__'):
    return qualname
  return '%s.%s' % (module, qualname)
1354
+
1355
+
1356
def _AddMergeFromMethod(cls):
  """Helper for _AddMessageMethods(): installs MergeFrom()."""
  LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
  CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE

  def MergeFrom(self, msg):
    if not isinstance(msg, cls):
      raise TypeError(
          'Parameter to MergeFrom() must be instance of same class: '
          'expected %s got %s.' % (_FullyQualifiedClassName(cls),
                                   _FullyQualifiedClassName(msg.__class__)))

    assert msg is not self
    self._Modified()

    fields = self._fields

    for field, value in msg._fields.items():
      if field.label == LABEL_REPEATED:
        # Repeated fields are merged element-wise into (possibly new)
        # containers.
        field_value = fields.get(field)
        if field_value is None:
          # Construct a new object to represent this field.
          field_value = field._default_constructor(self)
          fields[field] = field_value
        field_value.MergeFrom(value)
      elif field.cpp_type == CPPTYPE_MESSAGE:
        if value._is_present_in_parent:
          field_value = fields.get(field)
          if field_value is None:
            # Construct a new object to represent this field.
            field_value = field._default_constructor(self)
            fields[field] = field_value
          field_value.MergeFrom(value)
      else:
        # Scalars are simply overwritten.
        self._fields[field] = value
        if field.containing_oneof:
          self._UpdateOneofState(field)

    if msg._unknown_fields:
      if not self._unknown_fields:
        self._unknown_fields = []
      self._unknown_fields.extend(msg._unknown_fields)

  cls.MergeFrom = MergeFrom
1399
+
1400
+
1401
def _AddWhichOneofMethod(message_descriptor, cls):
  """Helper for _AddMessageMethods(): installs WhichOneof()."""
  def WhichOneof(self, oneof_name):
    """Returns the name of the currently set field inside a oneof, or None."""
    try:
      oneof = message_descriptor.oneofs_by_name[oneof_name]
    except KeyError:
      raise ValueError(
          'Protocol message has no oneof "%s" field.' % oneof_name)

    active_field = self._oneofs.get(oneof, None)
    if active_field is None or not self.HasField(active_field.name):
      return None
    return active_field.name

  cls.WhichOneof = WhichOneof
1417
+
1418
+
1419
def _Clear(self):
  """Removes all fields, unknown fields and oneof state, marking modified."""
  self._fields = {}
  self._unknown_fields = ()
  self._oneofs = {}
  # Clearing is a mutation, so invalidate cached sizes and notify listeners.
  self._Modified()
1426
+
1427
+
1428
def _UnknownFields(self):
  """Removed accessor; directs callers to the unknown_fields add-on module.

  Raises:
    NotImplementedError: always.
  """
  # Bug fix: the original message misspelled 'feature' as 'feaure'.
  raise NotImplementedError('Please use the add-on feature '
                            'unknown_fields.UnknownFieldSet(message) in '
                            'unknown_fields.py instead.')
1432
+
1433
+
1434
def _DiscardUnknownFields(self):
  """Drops unknown fields from this message and, recursively, submessages."""
  self._unknown_fields = []
  for field, value in self.ListFields():
    if field.cpp_type != _FieldDescriptor.CPPTYPE_MESSAGE:
      continue
    if _IsMapField(field):
      # Only message-valued maps can carry unknown fields.
      if _IsMessageMapField(field):
        for key in value:
          value[key].DiscardUnknownFields()
    elif field.label == _FieldDescriptor.LABEL_REPEATED:
      for sub_message in value:
        sub_message.DiscardUnknownFields()
    else:
      value.DiscardUnknownFields()
1447
+
1448
+
1449
def _SetListener(self, listener):
  """Installs listener, substituting a null listener when None is given."""
  self._listener = (
      message_listener_mod.NullMessageListener() if listener is None
      else listener)
1454
+
1455
+
1456
def _AddMessageMethods(message_descriptor, cls):
  """Adds implementations of all Message methods to cls."""
  _AddListFieldsMethod(message_descriptor, cls)
  _AddHasFieldMethod(message_descriptor, cls)
  _AddClearFieldMethod(message_descriptor, cls)
  # Extension accessors are only attached to extendable message types.
  if message_descriptor.is_extendable:
    _AddClearExtensionMethod(cls)
    _AddHasExtensionMethod(cls)
  _AddEqualsMethod(message_descriptor, cls)
  _AddStrMethod(message_descriptor, cls)
  _AddReprMethod(message_descriptor, cls)
  _AddUnicodeMethod(message_descriptor, cls)
  _AddContainsMethod(message_descriptor, cls)
  _AddByteSizeMethod(message_descriptor, cls)
  _AddSerializeToStringMethod(message_descriptor, cls)
  _AddSerializePartialToStringMethod(message_descriptor, cls)
  _AddMergeFromStringMethod(message_descriptor, cls)
  _AddIsInitializedMethod(message_descriptor, cls)
  _AddMergeFromMethod(cls)
  _AddWhichOneofMethod(message_descriptor, cls)
  # Adds methods which do not depend on cls.
  cls.Clear = _Clear
  cls.DiscardUnknownFields = _DiscardUnknownFields
  cls._SetListener = _SetListener
1480
+
1481
+
1482
def _AddPrivateHelperMethods(message_descriptor, cls):
  """Adds implementation of private helper methods to cls."""

  def Modified(self):
    """Sets the _cached_byte_size_dirty bit to true,
    and propagates this to our listener iff this was a state change.
    """

    # Note:  Some callers check _cached_byte_size_dirty before calling
    #   _Modified() as an extra optimization.  So, if this method is ever
    #   changed such that it does stuff even when _cached_byte_size_dirty is
    #   already true, the callers need to be updated.
    if not self._cached_byte_size_dirty:
      self._cached_byte_size_dirty = True
      self._listener_for_children.dirty = True
      self._is_present_in_parent = True
      self._listener.Modified()

  def _UpdateOneofState(self, field):
    """Sets field as the active field in its containing oneof.

    Will also delete currently active field in the oneof, if it is different
    from the argument. Does not mark the message as modified.
    """
    # setdefault returns the previously-active member when one exists;
    # a different member means we must evict its value.
    other_field = self._oneofs.setdefault(field.containing_oneof, field)
    if other_field is not field:
      del self._fields[other_field]
      self._oneofs[field.containing_oneof] = field

  cls._Modified = Modified
  # SetInParent is the public alias for the same state transition.
  cls.SetInParent = Modified
  cls._UpdateOneofState = _UpdateOneofState
1514
+
1515
+
1516
class _Listener(object):

  """MessageListener implementation that a parent message registers with its
  child message.

  In order to support semantics like:

    foo.bar.baz.moo = 23
    assert foo.HasField('bar')

  ...child objects must have back references to their parents.
  This helper class is at the heart of this support.
  """

  def __init__(self, parent_message):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
    """
    # This listener establishes a back reference from a child (contained) object
    # to its parent (containing) object.  We make this a weak reference to avoid
    # creating cyclic garbage when the client finishes with the 'parent' object
    # in the tree.
    if isinstance(parent_message, weakref.ProxyType):
      # Already a proxy; wrapping again would proxy the proxy.
      self._parent_message_weakref = parent_message
    else:
      self._parent_message_weakref = weakref.proxy(parent_message)

    # As an optimization, we also indicate directly on the listener whether
    # or not the parent message is dirty.  This way we can avoid traversing
    # up the tree in the common case.
    self.dirty = False

  def Modified(self):
    if self.dirty:
      return
    try:
      # Propagate the signal to our parents iff this is the first field set.
      self._parent_message_weakref._Modified()
    except ReferenceError:
      # We can get here if a client has kept a reference to a child object,
      # and is now setting a field on it, but the child's parent has been
      # garbage-collected.  This is not an error.
      pass
1560
+
1561
+
1562
class _OneofListener(_Listener):
  """Special listener implementation for setting composite oneof fields."""

  def __init__(self, parent_message, field):
    """Args:
      parent_message: The message whose _Modified() method we should call when
        we receive Modified() messages.
      field: The descriptor of the field being set in the parent message.
    """
    super(_OneofListener, self).__init__(parent_message)
    self._field = field

  def Modified(self):
    """Also updates the state of the containing oneof in the parent message."""
    try:
      # Mark our field as the active oneof member before the generic dirty
      # propagation.
      self._parent_message_weakref._UpdateOneofState(self._field)
      super(_OneofListener, self).Modified()
    except ReferenceError:
      # Parent was garbage-collected; nothing to update.
      pass
parrot/lib/python3.10/site-packages/google/protobuf/internal/testing_refleaks.py ADDED
@@ -0,0 +1,119 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A subclass of unittest.TestCase which checks for reference leaks.
9
+
10
+ To use:
11
+ - Use testing_refleak.BaseTestCase instead of unittest.TestCase
12
+ - Configure and compile Python with --with-pydebug
13
+
14
+ If sys.gettotalrefcount() is not available (because Python was built without
15
+ the Py_DEBUG option), then this module is a no-op and tests will run normally.
16
+ """
17
+
18
+ import copyreg
19
+ import gc
20
+ import sys
21
+ import unittest
22
+
23
+
24
class LocalTestResult(unittest.TestResult):
  """A TestResult which forwards events to a parent object, except for Skips."""

  def __init__(self, parent_result):
    super().__init__()
    # Aggregated result that receives forwarded errors and failures.
    self.parent_result = parent_result

  def addError(self, test, error):
    # Forward errors to the aggregated parent result.
    self.parent_result.addError(test, error)

  def addFailure(self, test, error):
    # Forward failures to the aggregated parent result.
    self.parent_result.addFailure(test, error)

  def addSkip(self, test, reason):
    # Skips are deliberately not forwarded (see class docstring).
    pass
39
+
40
+
41
class ReferenceLeakCheckerMixin(object):
  """A mixin class for TestCase, which checks reference counts."""

  # Number of measured runs; every run's refcount delta must be zero.
  NB_RUNS = 3

  def run(self, result=None):
    """Runs the test repeatedly and fails if the total refcount drifts.

    Requires a Py_DEBUG build of CPython (sys.gettotalrefcount).
    """
    testMethod = getattr(self, self._testMethodName)
    expecting_failure_method = getattr(testMethod, "__unittest_expecting_failure__", False)
    expecting_failure_class = getattr(self, "__unittest_expecting_failure__", False)
    if expecting_failure_class or expecting_failure_method:
      # Expected failures would produce noisy refcount deltas; skip the check.
      return

    # python_message.py registers all Message classes to some pickle global
    # registry, which makes the classes immortal.
    # We save a copy of this registry, and reset it before we count references.
    self._saved_pickle_registry = copyreg.dispatch_table.copy()

    # Run the test twice, to warm up the instance attributes.
    super(ReferenceLeakCheckerMixin, self).run(result=result)
    super(ReferenceLeakCheckerMixin, self).run(result=result)

    oldrefcount = 0
    local_result = LocalTestResult(result)
    num_flakes = 0

    refcount_deltas = []
    while len(refcount_deltas) < self.NB_RUNS:
      oldrefcount = self._getRefcounts()
      super(ReferenceLeakCheckerMixin, self).run(result=local_result)
      newrefcount = self._getRefcounts()
      # If the GC was able to collect some objects after the call to run() that
      # it could not collect before the call, then the counts won't match.
      if newrefcount < oldrefcount and num_flakes < 2:
        # This result is (probably) a flake -- garbage collectors aren't very
        # predictable, but a lower ending refcount is the opposite of the
        # failure we are testing for. If the result is repeatable, then we will
        # eventually report it, but not after trying to eliminate it.
        num_flakes += 1
        continue
      num_flakes = 0
      refcount_deltas.append(newrefcount - oldrefcount)
    print(refcount_deltas, self)

    try:
      self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
    except Exception:  # pylint: disable=broad-except
      result.addError(self, sys.exc_info())

  def _getRefcounts(self):
    """Returns the interpreter-wide total refcount after a full GC pass."""
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(self._saved_pickle_registry)
    # It is sometimes necessary to gc.collect() multiple times, to ensure
    # that all objects can be collected.
    gc.collect()
    gc.collect()
    gc.collect()
    return sys.gettotalrefcount()
98
+
99
+
100
if hasattr(sys, 'gettotalrefcount'):
  # Py_DEBUG build: wrap each test class in the reference-leak checker.

  def TestCase(test_class):
    # Inject ReferenceLeakCheckerMixin ahead of the existing bases so that
    # its run() override takes precedence over unittest.TestCase.run().
    new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
    new_class = type(test_class)(
        test_class.__name__, new_bases, dict(test_class.__dict__))
    return new_class
  SkipReferenceLeakChecker = unittest.skip

else:
  # When PyDEBUG is not enabled, run the tests normally.

  def TestCase(test_class):
    # No leak checking possible; return the class unchanged.
    return test_class

  def SkipReferenceLeakChecker(reason):
    del reason  # Don't skip, so don't need a reason.
    def Same(func):
      return func
    return Same
parrot/lib/python3.10/site-packages/google/protobuf/internal/type_checkers.py ADDED
@@ -0,0 +1,408 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides type checking routines.
9
+
10
+ This module defines type checking utilities in the forms of dictionaries:
11
+
12
+ VALUE_CHECKERS: A dictionary of field types and a value validation object.
13
+ TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
14
+ function.
15
+ TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
16
+ function.
17
+ FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
18
+ corresponding wire types.
19
+ TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
20
+ function.
21
+ """
22
+
23
+ __author__ = 'robinson@google.com (Will Robinson)'
24
+
25
+ import struct
26
+ import numbers
27
+
28
+ from google.protobuf.internal import decoder
29
+ from google.protobuf.internal import encoder
30
+ from google.protobuf.internal import wire_format
31
+ from google.protobuf import descriptor
32
+
33
+ _FieldDescriptor = descriptor.FieldDescriptor
34
+
35
+
36
def TruncateToFourByteFloat(original):
  """Returns *original* rounded to the nearest IEEE-754 single-precision value.

  Achieved by packing the double into a 4-byte float and unpacking it again.
  """
  packed = struct.pack('<f', original)
  (narrowed,) = struct.unpack('<f', packed)
  return narrowed
38
+
39
+
40
def ToShortestFloat(original):
  """Returns the shortest float that has same value in wire."""
  # All 4 byte floats have between 6 and 9 significant digits, so start the
  # search at 6 and widen one digit at a time.  Formatting with '.9g'
  # directly would keep noise digits for most values (e.g. a float field set
  # to 0.9 would print as 0.899999976), hence the iterative approach.
  precision = 6
  while True:
    rounded = float('%.*g' % (precision, original))
    if TruncateToFourByteFloat(rounded) == original:
      return rounded
    precision += 1
53
+
54
+
55
def GetTypeChecker(field):
  """Returns a type checker for a message field of the specified types.

  Args:
    field: FieldDescriptor object for this field.

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  # TYPE_STRING fields need unicode normalization; other CPPTYPE_STRING
  # fields (bytes) fall through to the table lookup below.
  if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
      field.type == _FieldDescriptor.TYPE_STRING):
    return UnicodeValueChecker()
  if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
    if field.enum_type.is_closed:
      # Closed enums only accept their declared numeric values.
      return EnumValueChecker(field.enum_type)
    else:
      # When open enums are supported, any int32 can be assigned.
      return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
  return _VALUE_CHECKERS[field.cpp_type]
75
+
76
+
77
+ # None of the typecheckers below make any attempt to guard against people
78
+ # subclassing builtin types and doing weird things. We're not trying to
79
+ # protect against malicious clients here, just people accidentally shooting
80
+ # themselves in the foot in obvious ways.
81
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    # Tuple of types accepted by CheckValue().
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if isinstance(proposed_value, self._acceptable_types):
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))
100
+
101
+
102
class TypeCheckerWithDefault(TypeChecker):
  """TypeChecker that additionally carries the field's default value."""

  def __init__(self, default_value, *acceptable_types):
    super(TypeCheckerWithDefault, self).__init__(*acceptable_types)
    self._default_value = default_value

  def DefaultValue(self):
    """Returns the default value supplied at construction time."""
    return self._default_value
110
+
111
+
112
class BoolValueChecker(object):
  """Type checker used for bool fields."""

  def CheckValue(self, proposed_value):
    """Accepts any __index__-able non-ndarray value; normalizes to bool."""
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    if is_ndarray or not hasattr(proposed_value, '__index__'):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (bool, int)))
    return bool(proposed_value)

  def DefaultValue(self):
    return False
126
+
127
+
128
+ # IntValueChecker and its subclasses perform integer type-checks
129
+ # and bounds-checks.
130
class IntValueChecker(object):

  """Checker used for integer fields. Performs type-check and range check."""

  def CheckValue(self, proposed_value):
    """Type-checks, range-checks (_MIN.._MAX) and normalizes to int."""
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    if is_ndarray or not hasattr(proposed_value, '__index__'):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, value_type, (int,)))

    # We force all values to int to make alternate implementations where the
    # distinction is more significant (e.g. the C++ implementation) simpler.
    as_int = int(proposed_value)
    if not self._MIN <= as_int <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    return as_int

  def DefaultValue(self):
    return 0
151
+
152
+
153
class EnumValueChecker(object):

  """Checker used for enum fields. Performs type-check and range check."""

  def __init__(self, enum_type):
    # Enum descriptor whose declared numeric values are accepted.
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    """Verifies proposed_value is an integral, declared enum value."""
    if not isinstance(proposed_value, numbers.Integral):
      raise TypeError('%.1024r has type %s, but expected one of: %s' %
                      (proposed_value, type(proposed_value), (int,)))
    if int(proposed_value) in self._enum_type.values_by_number:
      # Returned without int() normalization, matching historical behavior.
      return proposed_value
    raise ValueError('Unknown enum value: %d' % proposed_value)

  def DefaultValue(self):
    """Returns the number of the first declared enum value."""
    return self._enum_type.values[0].number
171
+
172
+
173
class UnicodeValueChecker(object):

  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    """Returns proposed_value as a str, decoding UTF-8 bytes if needed."""
    if isinstance(proposed_value, bytes):
      # Bytes input must be valid UTF-8 data; decode it to unicode.
      try:
        return proposed_value.decode('utf-8')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
                         'encoding. Non-UTF-8 strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))
    if isinstance(proposed_value, str):
      # Reject str values (e.g. lone surrogates) that cannot be encoded.
      try:
        proposed_value.encode('utf8')
      except UnicodeEncodeError:
        raise ValueError('%.1024r isn\'t a valid unicode string and '
                         'can\'t be encoded in UTF-8.' %
                         (proposed_value))
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value), (bytes, str)))

  def DefaultValue(self):
    return u""
207
+
208
+
209
class Int32ValueChecker(IntValueChecker):
  # Signed 32-bit bounds, expressed as bit shifts; plain ints keep the
  # range comparison cheap.
  _MIN = -(1 << 31)
  _MAX = (1 << 31) - 1
214
+
215
+
216
class Uint32ValueChecker(IntValueChecker):
  # Unsigned 32-bit bounds.
  _MIN = 0
  _MAX = 0xFFFFFFFF
219
+
220
+
221
class Int64ValueChecker(IntValueChecker):
  # Signed 64-bit bounds.
  _MIN = -0x8000000000000000
  _MAX = 0x7FFFFFFFFFFFFFFF
224
+
225
+
226
class Uint64ValueChecker(IntValueChecker):
  # Unsigned 64-bit bounds.
  _MIN = 0
  _MAX = 0xFFFFFFFFFFFFFFFF
229
+
230
+
231
# The max 4 bytes float is about 3.4028234663852886e+38.  Values beyond
# +/-_FLOAT_MAX are saturated to +/-inf by FloatValueChecker below.
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
_FLOAT_MIN = -_FLOAT_MAX
_INF = float('inf')
_NEG_INF = float('-inf')
236
+
237
+
238
class DoubleValueChecker(object):
  """Checker used for double fields.

  Performs type-check and range check.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to float."""
    value_type = type(proposed_value)
    is_ndarray = (value_type.__module__ == 'numpy' and
                  value_type.__name__ == 'ndarray')
    convertible = (hasattr(proposed_value, '__float__') or
                   hasattr(proposed_value, '__index__'))
    if is_ndarray or not convertible:
      raise TypeError('%.1024r has type %s, but expected one of: int, float' %
                      (proposed_value, value_type))
    return float(proposed_value)

  def DefaultValue(self):
    return 0.0
257
+
258
+
259
class FloatValueChecker(DoubleValueChecker):
  """Checker used for float fields.

  Performs type-check and range check.

  Values exceeding a 32-bit float will be converted to inf/-inf.
  """

  def CheckValue(self, proposed_value):
    """Check and convert proposed_value to a single-precision float."""
    as_double = super().CheckValue(proposed_value)
    # This inf rounding matches the C++ proto SafeDoubleToFloat logic.
    if as_double > _FLOAT_MAX:
      return _INF
    if as_double < _FLOAT_MIN:
      return _NEG_INF
    return TruncateToFourByteFloat(as_double)
277
+
278
# Type-checkers for all scalar CPPTYPEs.
# Note: CPPTYPE_STRING maps to the bytes checker here; GetTypeChecker()
# returns UnicodeValueChecker for TYPE_STRING fields instead.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(),
    _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
    _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(),
    _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
}


# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
}


# Maps from field types to encoder constructors.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
}


# Maps from field types to sizer constructors.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
}


# Maps from field type to a decoder constructor.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
}

# Maps from field type to expected wiretype.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
}
parrot/lib/python3.10/site-packages/google/protobuf/internal/well_known_types.py ADDED
@@ -0,0 +1,678 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains well known classes.
9
+
10
+ This files defines well known classes which need extra maintenance including:
11
+ - Any
12
+ - Duration
13
+ - FieldMask
14
+ - Struct
15
+ - Timestamp
16
+ """
17
+
18
+ __author__ = 'jieluo@google.com (Jie Luo)'
19
+
20
+ import calendar
21
+ import collections.abc
22
+ import datetime
23
+ import warnings
24
+ from google.protobuf.internal import field_mask
25
+ from typing import Union
26
+
27
# Re-export FieldMask from its implementation module.
FieldMask = field_mask.FieldMask

# NOTE(review): name carries a historical typo ("FOMAT"); kept as-is since
# other code may reference it.
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
_NANOS_PER_MILLISECOND = 1000000
_NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
_DURATION_SECONDS_MAX = 315576000000
# Bounds enforced on Timestamp.seconds (presumably the proto Timestamp
# range 0001-01-01 .. 9999-12-31 — verify against _CheckTimestampValid).
_TIMESTAMP_SECONDS_MIN = -62135596800
_TIMESTAMP_SECONDS_MAX = 253402300799

# Unix-epoch reference points used for Timestamp <-> datetime conversion.
_EPOCH_DATETIME_NAIVE = datetime.datetime(1970, 1, 1, tzinfo=None)
_EPOCH_DATETIME_AWARE = _EPOCH_DATETIME_NAIVE.replace(
    tzinfo=datetime.timezone.utc
)
+
45
+
46
class Any(object):
  """Class for Any Message type."""

  __slots__ = ()

  def Pack(self, msg, type_url_prefix='type.googleapis.com/',
           deterministic=None):
    """Packs the specified message into current Any message."""
    full_name = msg.DESCRIPTOR.full_name
    if type_url_prefix.endswith('/'):
      self.type_url = '%s%s' % (type_url_prefix, full_name)
    else:
      # Insert a separator when the prefix is empty or lacks a trailing '/'.
      self.type_url = '%s/%s' % (type_url_prefix, full_name)
    self.value = msg.SerializeToString(deterministic=deterministic)

  def Unpack(self, msg):
    """Unpacks the current Any message into specified message."""
    if not self.Is(msg.DESCRIPTOR):
      return False
    msg.ParseFromString(self.value)
    return True

  def TypeName(self):
    """Returns the protobuf type name of the inner message."""
    # Only last part is to be used: b/25630112
    return self.type_url.rsplit('/', 1)[-1]

  def Is(self, descriptor):
    """Checks if this Any represents the given protobuf type."""
    return '/' in self.type_url and self.TypeName() == descriptor.full_name
76
+
77
+
78
+ class Timestamp(object):
79
+ """Class for Timestamp message type."""
80
+
81
+ __slots__ = ()
82
+
83
  def ToJsonString(self):
    """Converts Timestamp to RFC 3339 date string format.

    Returns:
      A string converted from timestamp. The string is always Z-normalized
      and uses 3, 6 or 9 fractional digits as required to represent the
      exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
    """
    _CheckTimestampValid(self.seconds, self.nanos)
    nanos = self.nanos
    # Split seconds into whole days plus the in-day remainder before
    # building the timedelta.
    seconds = self.seconds % _SECONDS_PER_DAY
    days = (self.seconds - seconds) // _SECONDS_PER_DAY
    dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds)

    result = dt.isoformat()
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 'Z'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03dZ' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06dZ' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09dZ' % nanos
110
+
111
  def FromJsonString(self, value):
    """Parse a RFC 3339 date string format to Timestamp.

    Args:
      value: A date string. Any fractional digits (or none) and any offset are
          accepted as long as they fit into nano-seconds precision.
          Example of accepted format: '1972-01-01T10:00:20.021-05:00'

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Timestamp JSON value not a string: {!r}'.format(value))
    # Locate the timezone designator: 'Z', '+HH:MM' or '-HH:MM'.
    timezone_offset = value.find('Z')
    if timezone_offset == -1:
      timezone_offset = value.find('+')
    if timezone_offset == -1:
      # rfind: '-' also appears inside the date portion (e.g. 1972-01-01).
      timezone_offset = value.rfind('-')
    if timezone_offset == -1:
      raise ValueError(
          'Failed to parse timestamp: missing valid timezone offset.')
    time_value = value[0:timezone_offset]
    # Parse datetime and nanos.
    point_position = time_value.find('.')
    if point_position == -1:
      second_value = time_value
      nano_value = ''
    else:
      second_value = time_value[:point_position]
      nano_value = time_value[point_position + 1:]
    if 't' in second_value:
      raise ValueError(
          'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', '
          'lowercase \'t\' is not accepted'.format(second_value))
    date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT)
    td = date_object - datetime.datetime(1970, 1, 1)
    seconds = td.seconds + td.days * _SECONDS_PER_DAY
    if len(nano_value) > 9:
      raise ValueError(
          'Failed to parse Timestamp: nanos {0} more than '
          '9 fractional digits.'.format(nano_value))
    if nano_value:
      nanos = round(float('0.' + nano_value) * 1e9)
    else:
      nanos = 0
    # Parse timezone offsets.
    if value[timezone_offset] == 'Z':
      if len(value) != timezone_offset + 1:
        raise ValueError('Failed to parse timestamp: invalid trailing'
                         ' data {0}.'.format(value))
    else:
      timezone = value[timezone_offset:]
      pos = timezone.find(':')
      if pos == -1:
        raise ValueError(
            'Invalid timezone offset value: {0}.'.format(timezone))
      # Convert the offset to seconds and normalize the result to UTC.
      if timezone[0] == '+':
        seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
      else:
        seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
    # Set seconds and nanos
    _CheckTimestampValid(seconds, nanos)
    self.seconds = int(seconds)
    self.nanos = int(nanos)
175
+
176
+ def GetCurrentTime(self):
177
+ """Get the current UTC into Timestamp."""
178
+ self.FromDatetime(datetime.datetime.utcnow())
179
+
180
+ def ToNanoseconds(self):
181
+ """Converts Timestamp to nanoseconds since epoch."""
182
+ _CheckTimestampValid(self.seconds, self.nanos)
183
+ return self.seconds * _NANOS_PER_SECOND + self.nanos
184
+
185
+ def ToMicroseconds(self):
186
+ """Converts Timestamp to microseconds since epoch."""
187
+ _CheckTimestampValid(self.seconds, self.nanos)
188
+ return (self.seconds * _MICROS_PER_SECOND +
189
+ self.nanos // _NANOS_PER_MICROSECOND)
190
+
191
+ def ToMilliseconds(self):
192
+ """Converts Timestamp to milliseconds since epoch."""
193
+ _CheckTimestampValid(self.seconds, self.nanos)
194
+ return (self.seconds * _MILLIS_PER_SECOND +
195
+ self.nanos // _NANOS_PER_MILLISECOND)
196
+
197
  def ToSeconds(self):
    """Converts Timestamp to seconds since epoch.

    The nanos component is discarded.
    """
    _CheckTimestampValid(self.seconds, self.nanos)
    return self.seconds
201
+
202
+ def FromNanoseconds(self, nanos):
203
+ """Converts nanoseconds since epoch to Timestamp."""
204
+ seconds = nanos // _NANOS_PER_SECOND
205
+ nanos = nanos % _NANOS_PER_SECOND
206
+ _CheckTimestampValid(seconds, nanos)
207
+ self.seconds = seconds
208
+ self.nanos = nanos
209
+
210
+ def FromMicroseconds(self, micros):
211
+ """Converts microseconds since epoch to Timestamp."""
212
+ seconds = micros // _MICROS_PER_SECOND
213
+ nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
214
+ _CheckTimestampValid(seconds, nanos)
215
+ self.seconds = seconds
216
+ self.nanos = nanos
217
+
218
+ def FromMilliseconds(self, millis):
219
+ """Converts milliseconds since epoch to Timestamp."""
220
+ seconds = millis // _MILLIS_PER_SECOND
221
+ nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
222
+ _CheckTimestampValid(seconds, nanos)
223
+ self.seconds = seconds
224
+ self.nanos = nanos
225
+
226
  def FromSeconds(self, seconds):
    """Converts seconds since epoch to Timestamp (nanos reset to 0)."""
    _CheckTimestampValid(seconds, 0)
    self.seconds = seconds
    self.nanos = 0
231
+
232
  def ToDatetime(self, tzinfo=None):
    """Converts Timestamp to a datetime.

    Args:
      tzinfo: A datetime.tzinfo subclass; defaults to None.

    Returns:
      If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone
      information, i.e. not aware that it's UTC).

      Otherwise, returns a timezone-aware datetime in the input timezone.
    """
    # Using datetime.fromtimestamp for this would avoid constructing an extra
    # timedelta object and possibly an extra datetime. Unfortunately, that has
    # the disadvantage of not handling the full precision (on all platforms, see
    # https://github.com/python/cpython/issues/109849) or full range (on some
    # platforms, see https://github.com/python/cpython/issues/110042) of
    # datetime.
    _CheckTimestampValid(self.seconds, self.nanos)
    delta = datetime.timedelta(
        seconds=self.seconds,
        microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND),
    )
    if tzinfo is None:
      return _EPOCH_DATETIME_NAIVE + delta
    else:
      # Note the tz conversion has to come after the timedelta arithmetic.
      return (_EPOCH_DATETIME_AWARE + delta).astimezone(tzinfo)
260
+
261
  def FromDatetime(self, dt):
    """Converts datetime to Timestamp.

    Args:
      dt: A datetime. If it's timezone-naive, it's assumed to be in UTC.

    Raises:
      AttributeError: If `dt` is not a datetime-like object.
      ValueError: If the result is outside the valid Timestamp range.
    """
    # Using this guide: http://wiki.python.org/moin/WorkingWithTime
    # And this conversion guide: http://docs.python.org/library/time.html

    # Turn the date parameter into a tuple (struct_time) that can then be
    # manipulated into a long value of seconds. During the conversion from
    # struct_time to long, the source date in UTC, and so it follows that the
    # correct transformation is calendar.timegm()
    try:
      seconds = calendar.timegm(dt.utctimetuple())
      nanos = dt.microsecond * _NANOS_PER_MICROSECOND
    except AttributeError as e:
      raise AttributeError(
          'Fail to convert to Timestamp. Expected a datetime like '
          'object got {0} : {1}'.format(type(dt).__name__, e)
      ) from e
    # Validate before mutating so a failed conversion leaves self unchanged.
    _CheckTimestampValid(seconds, nanos)
    self.seconds = seconds
    self.nanos = nanos
  def _internal_assign(self, dt):
    # Hook used by the protobuf runtime to support assigning a datetime
    # directly to a Timestamp-typed field.
    self.FromDatetime(dt)
+ def __add__(self, value) -> datetime.datetime:
290
+ if isinstance(value, Duration):
291
+ return self.ToDatetime() + value.ToTimedelta()
292
+ return self.ToDatetime() + value
293
+
294
+ __radd__ = __add__
295
+
296
+ def __sub__(self, value) -> Union[datetime.datetime, datetime.timedelta]:
297
+ if isinstance(value, Timestamp):
298
+ return self.ToDatetime() - value.ToDatetime()
299
+ elif isinstance(value, Duration):
300
+ return self.ToDatetime() - value.ToTimedelta()
301
+ return self.ToDatetime() - value
302
+
303
  def __rsub__(self, dt) -> datetime.timedelta:
    # datetime - Timestamp: delegate to datetime subtraction on our UTC view.
    return dt - self.ToDatetime()
def _CheckTimestampValid(seconds, nanos):
  """Raises ValueError unless (seconds, nanos) is a valid Timestamp.

  Args:
    seconds: Seconds since the Unix epoch; must lie within the range
      representable by the proto3 Timestamp ([0001-01-01, 9999-12-31]).
    nanos: Sub-second component in nanoseconds; must be in
      [0, 999999999] (Timestamps are always normalized with nanos >= 0).

  Raises:
    ValueError: If either field is out of range.
  """
  if seconds < _TIMESTAMP_SECONDS_MIN or seconds > _TIMESTAMP_SECONDS_MAX:
    raise ValueError(
        'Timestamp is not valid: Seconds {0} must be in range '
        '[-62135596800, 253402300799].'.format(seconds))
  if nanos < 0 or nanos >= _NANOS_PER_SECOND:
    # Bug fix: the message previously claimed the range was [0, 999999],
    # but the guard (and the Timestamp spec) allows nanoseconds up to
    # 999,999,999.
    raise ValueError(
        'Timestamp is not valid: Nanos {} must be in a range '
        '[0, 999999999].'.format(nanos))
class Duration(object):
  """Class for Duration message type.

  Mixin giving google.protobuf.Duration conversions to/from JSON strings,
  integer time units, and datetime.timedelta. A negative Duration is
  normalized with both `seconds` and `nanos` non-positive.
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts Duration to string format.

    Returns:
      A string converted from self. The string format will contains
      3, 6, or 9 fractional digits depending on the precision required to
      represent the exact Duration value. For example: "1s", "1.010s",
      "1.000000100s", "-3.100s"
    """
    _CheckDurationValid(self.seconds, self.nanos)
    # NOTE: 1e9/1e6/1e3 are float literals, so `nanos` below becomes a
    # float; the '%d' conversions truncate it back to an integer digit
    # string. Do not "simplify" to integer literals without verifying the
    # formatting paths.
    if self.seconds < 0 or self.nanos < 0:
      result = '-'
      seconds = - self.seconds + int((0 - self.nanos) // 1e9)
      nanos = (0 - self.nanos) % 1e9
    else:
      result = ''
      seconds = self.seconds + int(self.nanos // 1e9)
      nanos = self.nanos % 1e9
    result += '%d' % seconds
    if (nanos % 1e9) == 0:
      # If there are 0 fractional digits, the fractional
      # point '.' should be omitted when serializing.
      return result + 's'
    if (nanos % 1e6) == 0:
      # Serialize 3 fractional digits.
      return result + '.%03ds' % (nanos / 1e6)
    if (nanos % 1e3) == 0:
      # Serialize 6 fractional digits.
      return result + '.%06ds' % (nanos / 1e3)
    # Serialize 9 fractional digits.
    return result + '.%09ds' % nanos

  def FromJsonString(self, value):
    """Converts a string to Duration.

    Args:
      value: A string to be converted. The string must end with 's'. Any
        fractional digits (or none) are accepted as long as they fit into
        precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s"

    Raises:
      ValueError: On parsing problems.
    """
    if not isinstance(value, str):
      raise ValueError('Duration JSON value not a string: {!r}'.format(value))
    if len(value) < 1 or value[-1] != 's':
      raise ValueError(
          'Duration must end with letter "s": {0}.'.format(value))
    try:
      pos = value.find('.')
      if pos == -1:
        seconds = int(value[:-1])
        nanos = 0
      else:
        seconds = int(value[:pos])
        # Re-attach the sign to the fractional part so e.g. "-0.5s"
        # (whose integer part parses as 0) still yields negative nanos.
        if value[0] == '-':
          nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
        else:
          nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
      _CheckDurationValid(seconds, nanos)
      self.seconds = seconds
      self.nanos = nanos
    except ValueError as e:
      raise ValueError(
          'Couldn\'t parse duration: {0} : {1}.'.format(value, e))

  def ToNanoseconds(self):
    """Converts a Duration to nanoseconds."""
    return self.seconds * _NANOS_PER_SECOND + self.nanos

  def ToMicroseconds(self):
    """Converts a Duration to microseconds (truncating toward zero)."""
    micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
    return self.seconds * _MICROS_PER_SECOND + micros

  def ToMilliseconds(self):
    """Converts a Duration to milliseconds (truncating toward zero)."""
    millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
    return self.seconds * _MILLIS_PER_SECOND + millis

  def ToSeconds(self):
    """Converts a Duration to seconds (the nanos component is dropped)."""
    return self.seconds

  def FromNanoseconds(self, nanos):
    """Converts nanoseconds to Duration."""
    self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
                            nanos % _NANOS_PER_SECOND)

  def FromMicroseconds(self, micros):
    """Converts microseconds to Duration."""
    self._NormalizeDuration(
        micros // _MICROS_PER_SECOND,
        (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)

  def FromMilliseconds(self, millis):
    """Converts milliseconds to Duration."""
    self._NormalizeDuration(
        millis // _MILLIS_PER_SECOND,
        (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)

  def FromSeconds(self, seconds):
    """Converts seconds to Duration; resets nanos to zero."""
    self.seconds = seconds
    self.nanos = 0

  def ToTimedelta(self) -> datetime.timedelta:
    """Converts Duration to timedelta (sub-microsecond precision is lost)."""
    return datetime.timedelta(
        seconds=self.seconds, microseconds=_RoundTowardZero(
            self.nanos, _NANOS_PER_MICROSECOND))

  def FromTimedelta(self, td):
    """Converts timedelta to Duration.

    Raises:
      AttributeError: If `td` is not a timedelta-like object.
    """
    try:
      self._NormalizeDuration(
          td.seconds + td.days * _SECONDS_PER_DAY,
          td.microseconds * _NANOS_PER_MICROSECOND,
      )
    except AttributeError as e:
      raise AttributeError(
          'Fail to convert to Duration. Expected a timedelta like '
          'object got {0}: {1}'.format(type(td).__name__, e)
      ) from e

  def _internal_assign(self, td):
    # Hook used by the protobuf runtime to support assigning a timedelta
    # directly to a Duration-typed field.
    self.FromTimedelta(td)

  def _NormalizeDuration(self, seconds, nanos):
    """Set Duration by seconds and nanos."""
    # Force nanos to be negative if the duration is negative, so both
    # fields always carry the same sign (as the Duration proto requires).
    if seconds < 0 and nanos > 0:
      seconds += 1
      nanos -= _NANOS_PER_SECOND
    self.seconds = seconds
    self.nanos = nanos

  def __add__(self, value) -> Union[datetime.datetime, datetime.timedelta]:
    if isinstance(value, Timestamp):
      return self.ToTimedelta() + value.ToDatetime()
    return self.ToTimedelta() + value

  __radd__ = __add__

  def __rsub__(self, dt) -> Union[datetime.datetime, datetime.timedelta]:
    # datetime/timedelta - Duration: delegate to timedelta subtraction.
    return dt - self.ToTimedelta()
def _CheckDurationValid(seconds, nanos):
  """Raises ValueError unless (seconds, nanos) is a valid Duration.

  Both fields must be in range and must not carry opposite signs.
  """
  if not -_DURATION_SECONDS_MAX <= seconds <= _DURATION_SECONDS_MAX:
    raise ValueError(
        'Duration is not valid: Seconds {0} must be in range '
        '[-315576000000, 315576000000].'.format(seconds))
  if not -_NANOS_PER_SECOND < nanos < _NANOS_PER_SECOND:
    raise ValueError(
        'Duration is not valid: Nanos {0} must be in range '
        '[-999999999, 999999999].'.format(nanos))
  if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
    raise ValueError(
        'Duration is not valid: Sign mismatch.')
+ def _RoundTowardZero(value, divider):
486
+ """Truncates the remainder part after division."""
487
+ # For some languages, the sign of the remainder is implementation
488
+ # dependent if any of the operands is negative. Here we enforce
489
+ # "rounded toward zero" semantics. For example, for (-5) / 2 an
490
+ # implementation may give -3 as the result with the remainder being
491
+ # 1. This function ensures we always return -2 (closer to zero).
492
+ result = value // divider
493
+ remainder = value % divider
494
+ if result < 0 and remainder > 0:
495
+ return result + 1
496
+ else:
497
+ return result
498
+
499
+
500
def _SetStructValue(struct_value, value):
  """Stores a Python object into a google.protobuf.Value message.

  Args:
    struct_value: The Value message to populate (exactly one oneof field
      of its `kind` is set).
    value: None, bool, str, int/float, dict/Struct, or list/tuple/ListValue.

  Raises:
    ValueError: If `value` is of an unsupported type.
  """
  if value is None:
    struct_value.null_value = 0
  elif isinstance(value, bool):
    # Note: this check must come before the number check because in Python
    # True and False are also considered numbers.
    struct_value.bool_value = value
  elif isinstance(value, str):
    struct_value.string_value = value
  elif isinstance(value, (int, float)):
    struct_value.number_value = value
  elif isinstance(value, (dict, Struct)):
    # Clear first so stale keys from a previous value do not survive.
    struct_value.struct_value.Clear()
    struct_value.struct_value.update(value)
  elif isinstance(value, (list, tuple, ListValue)):
    struct_value.list_value.Clear()
    struct_value.list_value.extend(value)
  else:
    raise ValueError('Unexpected type')
+ def _GetStructValue(struct_value):
522
+ which = struct_value.WhichOneof('kind')
523
+ if which == 'struct_value':
524
+ return struct_value.struct_value
525
+ elif which == 'null_value':
526
+ return None
527
+ elif which == 'number_value':
528
+ return struct_value.number_value
529
+ elif which == 'string_value':
530
+ return struct_value.string_value
531
+ elif which == 'bool_value':
532
+ return struct_value.bool_value
533
+ elif which == 'list_value':
534
+ return struct_value.list_value
535
+ elif which is None:
536
+ raise ValueError('Value not set')
537
+
538
+
539
class Struct(object):
  """Class for Struct message type.

  Mixin that gives google.protobuf.Struct a dict-like Python interface
  backed by its `fields` map of string -> Value messages. Values are
  wrapped/unwrapped lazily via _SetStructValue/_GetStructValue.
  """

  __slots__ = ()

  def __getitem__(self, key):
    # Unwrap the stored Value message into a plain Python object.
    return _GetStructValue(self.fields[key])

  def __setitem__(self, key, value):
    # Wrap the Python object into the Value message for this key.
    _SetStructValue(self.fields[key], value)

  def __delitem__(self, key):
    del self.fields[key]

  def __len__(self):
    return len(self.fields)

  def __iter__(self):
    return iter(self.fields)

  def _internal_assign(self, dictionary):
    # Hook used by the protobuf runtime to support assigning a dict
    # directly to a Struct-typed field.
    self.Clear()
    self.update(dictionary)

  def _internal_compare(self, other):
    # Structural equality against a plain mapping, recursing into nested
    # Struct/ListValue values when the counterpart is a dict/list.
    size = len(self)
    if size != len(other):
      return False
    for key, value in self.items():
      if key not in other:
        return False
      if isinstance(other[key], (dict, list)):
        if not value._internal_compare(other[key]):
          return False
      elif value != other[key]:
        return False
    return True

  def keys(self):  # pylint: disable=invalid-name
    return self.fields.keys()

  def values(self):  # pylint: disable=invalid-name
    return [self[key] for key in self]

  def items(self):  # pylint: disable=invalid-name
    return [(key, self[key]) for key in self]

  def get_or_create_list(self, key):
    """Returns a list for this key, creating if it didn't exist already."""
    if not self.fields[key].HasField('list_value'):
      # Clear will mark list_value modified which will indeed create a list.
      self.fields[key].list_value.Clear()
    return self.fields[key].list_value

  def get_or_create_struct(self, key):
    """Returns a struct for this key, creating if it didn't exist already."""
    if not self.fields[key].HasField('struct_value'):
      # Clear will mark struct_value modified which will indeed create a struct.
      self.fields[key].struct_value.Clear()
    return self.fields[key].struct_value

  def update(self, dictionary):  # pylint: disable=invalid-name
    for key, value in dictionary.items():
      _SetStructValue(self.fields[key], value)

# Register so isinstance(x, MutableMapping) holds without inheritance.
collections.abc.MutableMapping.register(Struct)
class ListValue(object):
  """Class for ListValue message type.

  Mixin that gives google.protobuf.ListValue a list-like Python interface
  backed by its repeated `values` field of Value messages.
  """

  __slots__ = ()

  def __len__(self):
    return len(self.values)

  def append(self, value):
    _SetStructValue(self.values.add(), value)

  def extend(self, elem_seq):
    for value in elem_seq:
      self.append(value)

  def __getitem__(self, index):
    """Retrieves item by the specified index."""
    return _GetStructValue(self.values.__getitem__(index))

  def __setitem__(self, index, value):
    _SetStructValue(self.values.__getitem__(index), value)

  def __delitem__(self, key):
    del self.values[key]

  def _internal_assign(self, elem_seq):
    # Hook used by the protobuf runtime to support assigning a sequence
    # directly to a ListValue-typed field.
    self.Clear()
    self.extend(elem_seq)

  def _internal_compare(self, other):
    # Element-wise structural equality against a plain sequence, recursing
    # into nested Struct/ListValue values when the counterpart is dict/list.
    size = len(self)
    if size != len(other):
      return False
    for i in range(size):
      if isinstance(other[i], (dict, list)):
        if not self[i]._internal_compare(other[i]):
          return False
      elif self[i] != other[i]:
        return False
    return True

  def items(self):
    # NOTE: unlike dict.items(), this yields bare values (list semantics).
    for i in range(len(self)):
      yield self[i]

  def add_struct(self):
    """Appends and returns a struct value as the next value in the list."""
    struct_value = self.values.add().struct_value
    # Clear will mark struct_value modified which will indeed create a struct.
    struct_value.Clear()
    return struct_value

  def add_list(self):
    """Appends and returns a list value as the next value in the list."""
    list_value = self.values.add().list_value
    # Clear will mark list_value modified which will indeed create a list.
    list_value.Clear()
    return list_value

# Register so isinstance(x, MutableSequence) holds without inheritance.
collections.abc.MutableSequence.register(ListValue)
# LINT.IfChange(wktbases)
# Maps the full proto name of each well-known type to the mixin class above
# that the runtime injects into the generated message class's bases.
WKTBASES = {
    'google.protobuf.Any': Any,
    'google.protobuf.Duration': Duration,
    'google.protobuf.FieldMask': FieldMask,
    'google.protobuf.ListValue': ListValue,
    'google.protobuf.Struct': Struct,
    'google.protobuf.Timestamp': Timestamp,
}
# LINT.ThenChange(//depot/google.protobuf/compiler/python/pyi_generator.cc:wktbases)
parrot/lib/python3.10/site-packages/google/protobuf/internal/wire_format.py ADDED
@@ -0,0 +1,245 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Constants and static functions to support protocol buffer wire format."""
9
+
10
+ __author__ = 'robinson@google.com (Will Robinson)'
11
+
12
+ import struct
13
+ from google.protobuf import descriptor
14
+ from google.protobuf import message
15
+
16
+
17
+ TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
18
+ TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
19
+
20
+ # These numbers identify the wire type of a protocol buffer value.
21
+ # We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
22
+ # tag-and-type to store one of these WIRETYPE_* constants.
23
+ # These values must match WireType enum in //google/protobuf/wire_format.h.
24
+ WIRETYPE_VARINT = 0
25
+ WIRETYPE_FIXED64 = 1
26
+ WIRETYPE_LENGTH_DELIMITED = 2
27
+ WIRETYPE_START_GROUP = 3
28
+ WIRETYPE_END_GROUP = 4
29
+ WIRETYPE_FIXED32 = 5
30
+ _WIRETYPE_MAX = 5
31
+
32
+
33
+ # Bounds for various integer types.
34
+ INT32_MAX = int((1 << 31) - 1)
35
+ INT32_MIN = int(-(1 << 31))
36
+ UINT32_MAX = (1 << 32) - 1
37
+
38
+ INT64_MAX = (1 << 63) - 1
39
+ INT64_MIN = -(1 << 63)
40
+ UINT64_MAX = (1 << 64) - 1
41
+
42
+ # "struct" format strings that will encode/decode the specified formats.
43
+ FORMAT_UINT32_LITTLE_ENDIAN = '<I'
44
+ FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
45
+ FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
46
+ FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
47
+
48
+
49
+ # We'll have to provide alternate implementations of AppendLittleEndian*() on
50
+ # any architectures where these checks fail.
51
+ if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
52
+ raise AssertionError('Format "I" is not a 32-bit number.')
53
+ if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
54
+ raise AssertionError('Format "Q" is not a 64-bit number.')
55
+
56
+
57
def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: If wire_type is outside [0, _WIRETYPE_MAX].
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type
def UnpackTag(tag):
  """The inverse of PackTag(). Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  # High bits are the field number; low TAG_TYPE_BITS bits are the wire type.
  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
def ZigZagEncode(value):
  """ZigZag Transform: Encodes signed integers so that they can be
  effectively used with varint encoding.

  Maps n >= 0 to 2n and n < 0 to 2|n| - 1, so small magnitudes of either
  sign stay small on the wire.
  """
  if value < 0:
    return (value << 1) ^ (~0)
  return value << 1
def ZigZagDecode(value):
  """Inverse of ZigZagEncode(): odd values decode to negatives."""
  decoded = value >> 1
  if value & 0x1:
    decoded ^= ~0
  return decoded
def Int32ByteSize(field_number, int32):
  # int32 is sign-extended to 64 bits on the wire, so it costs the same
  # as the equivalent int64.
  return Int64ByteSize(field_number, int32)
def Int32ByteSizeNoTag(int32):
  # Mask to the unsigned 64-bit two's-complement form before measuring,
  # since negative varints always serialize as 10 bytes.
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
def Int64ByteSize(field_number, int64):
  # Have to convert to uint before calling UInt64ByteSize().
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
def UInt32ByteSize(field_number, uint32):
  # uint32 varints are a subset of uint64 varints.
  return UInt64ByteSize(field_number, uint32)
def UInt64ByteSize(field_number, uint64):
  # Tag varint plus the value varint.
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
def SInt32ByteSize(field_number, int32):
  # sint32 is ZigZag-transformed first so small negatives stay small.
  return UInt32ByteSize(field_number, ZigZagEncode(int32))
def SInt64ByteSize(field_number, int64):
  # sint64 is ZigZag-transformed first so small negatives stay small.
  return UInt64ByteSize(field_number, ZigZagEncode(int64))
def Fixed32ByteSize(field_number, fixed32):
  # Tag plus a fixed 4-byte little-endian payload.
  return TagByteSize(field_number) + 4
def Fixed64ByteSize(field_number, fixed64):
  # Tag plus a fixed 8-byte little-endian payload.
  return TagByteSize(field_number) + 8
def SFixed32ByteSize(field_number, sfixed32):
  # Tag plus a fixed 4-byte little-endian payload.
  return TagByteSize(field_number) + 4
def SFixed64ByteSize(field_number, sfixed64):
  # Tag plus a fixed 8-byte little-endian payload.
  return TagByteSize(field_number) + 8
def FloatByteSize(field_number, flt):
  # Tag plus a fixed 4-byte IEEE-754 payload.
  return TagByteSize(field_number) + 4
def DoubleByteSize(field_number, double):
  # Tag plus a fixed 8-byte IEEE-754 payload.
  return TagByteSize(field_number) + 8
def BoolByteSize(field_number, b):
  # Bools are a one-byte varint (0 or 1) after the tag.
  return TagByteSize(field_number) + 1
def EnumByteSize(field_number, enum):
  # Enums are serialized exactly like uint32 varints.
  return UInt32ByteSize(field_number, enum)
def StringByteSize(field_number, string):
  # Strings are measured on their UTF-8 encoding, not their character count.
  return BytesByteSize(field_number, string.encode('utf-8'))
def BytesByteSize(field_number, b):
  # Tag, then a varint length prefix, then the raw bytes.
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))
def GroupByteSize(field_number, message):
  # Groups are delimited, not length-prefixed: START and END tags bracket
  # the serialized message.
  return (2 * TagByteSize(field_number)  # START and END group.
          + message.ByteSize())
def MessageByteSize(field_number, message):
  # Tag, then a varint length prefix, then the serialized submessage.
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message.ByteSize())
          + message.ByteSize())
def MessageSetItemByteSize(field_number, msg):
  """Returns the wire size of one MessageSet item (legacy group encoding)."""
  # First compute the sizes of the tags.
  # There are 2 tags for the beginning and ending of the repeated group, that
  # is field number 1, one with field number 2 (type_id) and one with field
  # number 3 (message).
  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))

  # Add the number of bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # The number of bytes for encoding the length of the message.
  total_size += _VarUInt64ByteSizeNoTag(message_size)

  # The size of the message.
  total_size += message_size
  return total_size
def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number.

  Args:
    field_number: The proto field number, in [1, 1 << 29).
  """
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
+ # Private helper function for the *ByteSize() functions above.
208
+
209
+ def _VarUInt64ByteSizeNoTag(uint64):
210
+ """Returns the number of bytes required to serialize a single varint
211
+ using boundary value comparisons. (unrolled loop optimization -WPierce)
212
+ uint64 must be unsigned.
213
+ """
214
+ if uint64 <= 0x7f: return 1
215
+ if uint64 <= 0x3fff: return 2
216
+ if uint64 <= 0x1fffff: return 3
217
+ if uint64 <= 0xfffffff: return 4
218
+ if uint64 <= 0x7ffffffff: return 5
219
+ if uint64 <= 0x3ffffffffff: return 6
220
+ if uint64 <= 0x1ffffffffffff: return 7
221
+ if uint64 <= 0xffffffffffffff: return 8
222
+ if uint64 <= 0x7fffffffffffffff: return 9
223
+ if uint64 > UINT64_MAX:
224
+ raise message.EncodeError('Value out of range: %d' % uint64)
225
+ return 10
226
+
227
+
228
# Field types that can never use packed encoding: they are length-delimited
# or group-encoded rather than fixed-size/varint scalars.
NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES
)
def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
parrot/lib/python3.10/site-packages/google/protobuf/json_format.py ADDED
@@ -0,0 +1,1069 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains routines for printing protocol messages in JSON format.
9
+
10
+ Simple usage example:
11
+
12
+ # Create a proto object and serialize it to a json format string.
13
+ message = my_proto_pb2.MyMessage(foo='bar')
14
+ json_string = json_format.MessageToJson(message)
15
+
16
+ # Parse a json format string to proto object.
17
+ message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
18
+ """
19
+
20
+ __author__ = 'jieluo@google.com (Jie Luo)'
21
+
22
+
23
+ import base64
24
+ from collections import OrderedDict
25
+ import json
26
+ import math
27
+ from operator import methodcaller
28
+ import re
29
+
30
+ from google.protobuf import descriptor
31
+ from google.protobuf import message_factory
32
+ from google.protobuf import symbol_database
33
+ from google.protobuf.internal import type_checkers
34
+
35
+
36
+ _INT_TYPES = frozenset([
37
+ descriptor.FieldDescriptor.CPPTYPE_INT32,
38
+ descriptor.FieldDescriptor.CPPTYPE_UINT32,
39
+ descriptor.FieldDescriptor.CPPTYPE_INT64,
40
+ descriptor.FieldDescriptor.CPPTYPE_UINT64,
41
+ ])
42
+ _INT64_TYPES = frozenset([
43
+ descriptor.FieldDescriptor.CPPTYPE_INT64,
44
+ descriptor.FieldDescriptor.CPPTYPE_UINT64,
45
+ ])
46
+ _FLOAT_TYPES = frozenset([
47
+ descriptor.FieldDescriptor.CPPTYPE_FLOAT,
48
+ descriptor.FieldDescriptor.CPPTYPE_DOUBLE,
49
+ ])
50
+ _INFINITY = 'Infinity'
51
+ _NEG_INFINITY = '-Infinity'
52
+ _NAN = 'NaN'
53
+
54
+ _UNPAIRED_SURROGATE_PATTERN = re.compile(
55
+ '[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
56
+ )
57
+
58
+ _VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
59
+
60
+
61
class Error(Exception):
  """Top-level module error for json_format; base of all errors raised here."""
class SerializeToJsonError(Error):
  """Thrown if serialization of a message to JSON fails."""
class ParseError(Error):
  """Thrown in case of a JSON parsing error."""
class EnumStringValueParseError(ParseError):
  """Thrown if unknown string enum value is encountered.
  This exception is suppressed if ignore_unknown_fields is set.
  """
def MessageToJson(
    message,
    preserving_proto_field_name=False,
    indent=2,
    sort_keys=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
    ensure_ascii=True,
    always_print_fields_with_no_presence=False,
):
  """Converts protobuf message to JSON format.

  Args:
    message: The protocol buffers message instance to serialize.
    always_print_fields_with_no_presence: If True, fields without
      presence (implicit presence scalars, repeated fields, and map fields) will
      always be serialized. Any field that supports presence is not affected by
      this option (including singular message fields and oneof fields).
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    indent: The JSON object will be pretty-printed with this indent level. An
      indent level of 0 or negative will only insert newlines. If the indent
      level is None, no newlines will be inserted.
    sort_keys: If True, then the output will be sorted by field names.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: If set, use this to specify float field valid digits.
    ensure_ascii: If True, strings with non-ASCII characters are escaped. If
      False, Unicode strings are returned unchanged.

  Returns:
    A string containing the JSON formatted protocol buffer message.
  """
  # All the real work happens in _Printer; this is just a convenience wrapper.
  return _Printer(
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision,
      always_print_fields_with_no_presence,
  ).ToJsonString(message, indent, sort_keys, ensure_ascii)
def MessageToDict(
    message,
    always_print_fields_with_no_presence=False,
    preserving_proto_field_name=False,
    use_integers_for_enums=False,
    descriptor_pool=None,
    float_precision=None,
):
  """Converts protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.

  Args:
    message: The protocol buffers message instance to serialize.
    always_print_fields_with_no_presence: If True, fields without
      presence (implicit presence scalars, repeated fields, and map fields) will
      always be serialized. Any field that supports presence is not affected by
      this option (including singular message fields and oneof fields).
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: If set, use this to specify float field valid digits.

  Returns:
    A dict representation of the protocol buffer message.
  """
  # Same machinery as MessageToJson, but stop before the json.dumps step.
  # pylint: disable=protected-access
  return _Printer(
      preserving_proto_field_name,
      use_integers_for_enums,
      descriptor_pool,
      float_precision,
      always_print_fields_with_no_presence,
  )._MessageToJsonObject(message)
+ def _IsMapEntry(field):
166
+ return (
167
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE
168
+ and field.message_type.has_options
169
+ and field.message_type.GetOptions().map_entry
170
+ )
171
+
172
+
173
+ class _Printer(object):
174
+ """JSON format printer for protocol message."""
175
+
176
+ def __init__(
177
+ self,
178
+ preserving_proto_field_name=False,
179
+ use_integers_for_enums=False,
180
+ descriptor_pool=None,
181
+ float_precision=None,
182
+ always_print_fields_with_no_presence=False,
183
+ ):
184
+ self.always_print_fields_with_no_presence = (
185
+ always_print_fields_with_no_presence
186
+ )
187
+ self.preserving_proto_field_name = preserving_proto_field_name
188
+ self.use_integers_for_enums = use_integers_for_enums
189
+ self.descriptor_pool = descriptor_pool
190
+ if float_precision:
191
+ self.float_format = '.{}g'.format(float_precision)
192
+ else:
193
+ self.float_format = None
194
+
195
+ def ToJsonString(self, message, indent, sort_keys, ensure_ascii):
196
+ js = self._MessageToJsonObject(message)
197
+ return json.dumps(
198
+ js, indent=indent, sort_keys=sort_keys, ensure_ascii=ensure_ascii
199
+ )
200
+
201
+ def _MessageToJsonObject(self, message):
202
+ """Converts message to an object according to Proto3 JSON Specification."""
203
+ message_descriptor = message.DESCRIPTOR
204
+ full_name = message_descriptor.full_name
205
+ if _IsWrapperMessage(message_descriptor):
206
+ return self._WrapperMessageToJsonObject(message)
207
+ if full_name in _WKTJSONMETHODS:
208
+ return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
209
+ js = {}
210
+ return self._RegularMessageToJsonObject(message, js)
211
+
212
+ def _RegularMessageToJsonObject(self, message, js):
213
+ """Converts normal message according to Proto3 JSON Specification."""
214
+ fields = message.ListFields()
215
+
216
+ try:
217
+ for field, value in fields:
218
+ if self.preserving_proto_field_name:
219
+ name = field.name
220
+ else:
221
+ name = field.json_name
222
+ if _IsMapEntry(field):
223
+ # Convert a map field.
224
+ v_field = field.message_type.fields_by_name['value']
225
+ js_map = {}
226
+ for key in value:
227
+ if isinstance(key, bool):
228
+ if key:
229
+ recorded_key = 'true'
230
+ else:
231
+ recorded_key = 'false'
232
+ else:
233
+ recorded_key = str(key)
234
+ js_map[recorded_key] = self._FieldToJsonObject(v_field, value[key])
235
+ js[name] = js_map
236
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
237
+ # Convert a repeated field.
238
+ js[name] = [self._FieldToJsonObject(field, k) for k in value]
239
+ elif field.is_extension:
240
+ name = '[%s]' % field.full_name
241
+ js[name] = self._FieldToJsonObject(field, value)
242
+ else:
243
+ js[name] = self._FieldToJsonObject(field, value)
244
+
245
+ # Serialize default value if including_default_value_fields is True.
246
+ if (
247
+ self.always_print_fields_with_no_presence
248
+ ):
249
+ message_descriptor = message.DESCRIPTOR
250
+ for field in message_descriptor.fields:
251
+
252
+ # always_print_fields_with_no_presence doesn't apply to
253
+ # any field which supports presence.
254
+ if (
255
+ self.always_print_fields_with_no_presence
256
+ and field.has_presence
257
+ ):
258
+ continue
259
+
260
+ if self.preserving_proto_field_name:
261
+ name = field.name
262
+ else:
263
+ name = field.json_name
264
+ if name in js:
265
+ # Skip the field which has been serialized already.
266
+ continue
267
+ if _IsMapEntry(field):
268
+ js[name] = {}
269
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
270
+ js[name] = []
271
+ else:
272
+ js[name] = self._FieldToJsonObject(field, field.default_value)
273
+
274
+ except ValueError as e:
275
+ raise SerializeToJsonError(
276
+ 'Failed to serialize {0} field: {1}.'.format(field.name, e)
277
+ ) from e
278
+
279
+ return js
280
+
281
+ def _FieldToJsonObject(self, field, value):
282
+ """Converts field value according to Proto3 JSON Specification."""
283
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
284
+ return self._MessageToJsonObject(value)
285
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
286
+ if self.use_integers_for_enums:
287
+ return value
288
+ if field.enum_type.full_name == 'google.protobuf.NullValue':
289
+ return None
290
+ enum_value = field.enum_type.values_by_number.get(value, None)
291
+ if enum_value is not None:
292
+ return enum_value.name
293
+ else:
294
+ if field.enum_type.is_closed:
295
+ raise SerializeToJsonError(
296
+ 'Enum field contains an integer value '
297
+ 'which can not mapped to an enum value.'
298
+ )
299
+ else:
300
+ return value
301
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
302
+ if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
303
+ # Use base64 Data encoding for bytes
304
+ return base64.b64encode(value).decode('utf-8')
305
+ else:
306
+ return str(value)
307
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
308
+ return bool(value)
309
+ elif field.cpp_type in _INT64_TYPES:
310
+ return str(value)
311
+ elif field.cpp_type in _FLOAT_TYPES:
312
+ if math.isinf(value):
313
+ if value < 0.0:
314
+ return _NEG_INFINITY
315
+ else:
316
+ return _INFINITY
317
+ if math.isnan(value):
318
+ return _NAN
319
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
320
+ if self.float_format:
321
+ return float(format(value, self.float_format))
322
+ else:
323
+ return type_checkers.ToShortestFloat(value)
324
+
325
+ return value
326
+
327
+ def _AnyMessageToJsonObject(self, message):
328
+ """Converts Any message according to Proto3 JSON Specification."""
329
+ if not message.ListFields():
330
+ return {}
331
+ # Must print @type first, use OrderedDict instead of {}
332
+ js = OrderedDict()
333
+ type_url = message.type_url
334
+ js['@type'] = type_url
335
+ sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
336
+ sub_message.ParseFromString(message.value)
337
+ message_descriptor = sub_message.DESCRIPTOR
338
+ full_name = message_descriptor.full_name
339
+ if _IsWrapperMessage(message_descriptor):
340
+ js['value'] = self._WrapperMessageToJsonObject(sub_message)
341
+ return js
342
+ if full_name in _WKTJSONMETHODS:
343
+ js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0], sub_message)(
344
+ self
345
+ )
346
+ return js
347
+ return self._RegularMessageToJsonObject(sub_message, js)
348
+
349
+ def _GenericMessageToJsonObject(self, message):
350
+ """Converts message according to Proto3 JSON Specification."""
351
+ # Duration, Timestamp and FieldMask have ToJsonString method to do the
352
+ # convert. Users can also call the method directly.
353
+ return message.ToJsonString()
354
+
355
+ def _ValueMessageToJsonObject(self, message):
356
+ """Converts Value message according to Proto3 JSON Specification."""
357
+ which = message.WhichOneof('kind')
358
+ # If the Value message is not set treat as null_value when serialize
359
+ # to JSON. The parse back result will be different from original message.
360
+ if which is None or which == 'null_value':
361
+ return None
362
+ if which == 'list_value':
363
+ return self._ListValueMessageToJsonObject(message.list_value)
364
+ if which == 'number_value':
365
+ value = message.number_value
366
+ if math.isinf(value):
367
+ raise ValueError(
368
+ 'Fail to serialize Infinity for Value.number_value, '
369
+ 'which would parse as string_value'
370
+ )
371
+ if math.isnan(value):
372
+ raise ValueError(
373
+ 'Fail to serialize NaN for Value.number_value, '
374
+ 'which would parse as string_value'
375
+ )
376
+ else:
377
+ value = getattr(message, which)
378
+ oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
379
+ return self._FieldToJsonObject(oneof_descriptor, value)
380
+
381
+ def _ListValueMessageToJsonObject(self, message):
382
+ """Converts ListValue message according to Proto3 JSON Specification."""
383
+ return [self._ValueMessageToJsonObject(value) for value in message.values]
384
+
385
+ def _StructMessageToJsonObject(self, message):
386
+ """Converts Struct message according to Proto3 JSON Specification."""
387
+ fields = message.fields
388
+ ret = {}
389
+ for key in fields:
390
+ ret[key] = self._ValueMessageToJsonObject(fields[key])
391
+ return ret
392
+
393
+ def _WrapperMessageToJsonObject(self, message):
394
+ return self._FieldToJsonObject(
395
+ message.DESCRIPTOR.fields_by_name['value'], message.value
396
+ )
397
+
398
+
399
+ def _IsWrapperMessage(message_descriptor):
400
+ return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
401
+
402
+
403
+ def _DuplicateChecker(js):
404
+ result = {}
405
+ for name, value in js:
406
+ if name in result:
407
+ raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
408
+ result[name] = value
409
+ return result
410
+
411
+
412
+ def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
413
+ """Creates a message from a type URL."""
414
+ db = symbol_database.Default()
415
+ pool = db.pool if descriptor_pool is None else descriptor_pool
416
+ type_name = type_url.split('/')[-1]
417
+ try:
418
+ message_descriptor = pool.FindMessageTypeByName(type_name)
419
+ except KeyError as e:
420
+ raise TypeError(
421
+ 'Can not find message descriptor by type_url: {0}'.format(type_url)
422
+ ) from e
423
+ message_class = message_factory.GetMessageClass(message_descriptor)
424
+ return message_class()
425
+
426
+
427
+ def Parse(
428
+ text,
429
+ message,
430
+ ignore_unknown_fields=False,
431
+ descriptor_pool=None,
432
+ max_recursion_depth=100,
433
+ ):
434
+ """Parses a JSON representation of a protocol message into a message.
435
+
436
+ Args:
437
+ text: Message JSON representation.
438
+ message: A protocol buffer message to merge into.
439
+ ignore_unknown_fields: If True, do not raise errors for unknown fields.
440
+ descriptor_pool: A Descriptor Pool for resolving types. If None use the
441
+ default.
442
+ max_recursion_depth: max recursion depth of JSON message to be deserialized.
443
+ JSON messages over this depth will fail to be deserialized. Default value
444
+ is 100.
445
+
446
+ Returns:
447
+ The same message passed as argument.
448
+
449
+ Raises::
450
+ ParseError: On JSON parsing problems.
451
+ """
452
+ if not isinstance(text, str):
453
+ text = text.decode('utf-8')
454
+
455
+ try:
456
+ js = json.loads(text, object_pairs_hook=_DuplicateChecker)
457
+ except Exception as e:
458
+ raise ParseError('Failed to load JSON: {0}.'.format(str(e))) from e
459
+
460
+ try:
461
+ return ParseDict(
462
+ js, message, ignore_unknown_fields, descriptor_pool, max_recursion_depth
463
+ )
464
+ except ParseError as e:
465
+ raise e
466
+ except Exception as e:
467
+ raise ParseError(
468
+ 'Failed to parse JSON: {0}: {1}.'.format(type(e).__name__, str(e))
469
+ ) from e
470
+
471
+
472
+ def ParseDict(
473
+ js_dict,
474
+ message,
475
+ ignore_unknown_fields=False,
476
+ descriptor_pool=None,
477
+ max_recursion_depth=100,
478
+ ):
479
+ """Parses a JSON dictionary representation into a message.
480
+
481
+ Args:
482
+ js_dict: Dict representation of a JSON message.
483
+ message: A protocol buffer message to merge into.
484
+ ignore_unknown_fields: If True, do not raise errors for unknown fields.
485
+ descriptor_pool: A Descriptor Pool for resolving types. If None use the
486
+ default.
487
+ max_recursion_depth: max recursion depth of JSON message to be deserialized.
488
+ JSON messages over this depth will fail to be deserialized. Default value
489
+ is 100.
490
+
491
+ Returns:
492
+ The same message passed as argument.
493
+ """
494
+ parser = _Parser(ignore_unknown_fields, descriptor_pool, max_recursion_depth)
495
+ parser.ConvertMessage(js_dict, message, '')
496
+ return message
497
+
498
+
499
+ _INT_OR_FLOAT = (int, float)
500
+
501
+
502
+ class _Parser(object):
503
+ """JSON format parser for protocol message."""
504
+
505
+ def __init__(
506
+ self, ignore_unknown_fields, descriptor_pool, max_recursion_depth
507
+ ):
508
+ self.ignore_unknown_fields = ignore_unknown_fields
509
+ self.descriptor_pool = descriptor_pool
510
+ self.max_recursion_depth = max_recursion_depth
511
+ self.recursion_depth = 0
512
+
513
+ def ConvertMessage(self, value, message, path):
514
+ """Convert a JSON object into a message.
515
+
516
+ Args:
517
+ value: A JSON object.
518
+ message: A WKT or regular protocol message to record the data.
519
+ path: parent path to log parse error info.
520
+
521
+ Raises:
522
+ ParseError: In case of convert problems.
523
+ """
524
+ self.recursion_depth += 1
525
+ if self.recursion_depth > self.max_recursion_depth:
526
+ raise ParseError(
527
+ 'Message too deep. Max recursion depth is {0}'.format(
528
+ self.max_recursion_depth
529
+ )
530
+ )
531
+ message_descriptor = message.DESCRIPTOR
532
+ full_name = message_descriptor.full_name
533
+ if not path:
534
+ path = message_descriptor.name
535
+ if _IsWrapperMessage(message_descriptor):
536
+ self._ConvertWrapperMessage(value, message, path)
537
+ elif full_name in _WKTJSONMETHODS:
538
+ methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self)
539
+ else:
540
+ self._ConvertFieldValuePair(value, message, path)
541
+ self.recursion_depth -= 1
542
+
543
+ def _ConvertFieldValuePair(self, js, message, path):
544
+ """Convert field value pairs into regular message.
545
+
546
+ Args:
547
+ js: A JSON object to convert the field value pairs.
548
+ message: A regular protocol message to record the data.
549
+ path: parent path to log parse error info.
550
+
551
+ Raises:
552
+ ParseError: In case of problems converting.
553
+ """
554
+ names = []
555
+ message_descriptor = message.DESCRIPTOR
556
+ fields_by_json_name = dict(
557
+ (f.json_name, f) for f in message_descriptor.fields
558
+ )
559
+ for name in js:
560
+ try:
561
+ field = fields_by_json_name.get(name, None)
562
+ if not field:
563
+ field = message_descriptor.fields_by_name.get(name, None)
564
+ if not field and _VALID_EXTENSION_NAME.match(name):
565
+ if not message_descriptor.is_extendable:
566
+ raise ParseError(
567
+ 'Message type {0} does not have extensions at {1}'.format(
568
+ message_descriptor.full_name, path
569
+ )
570
+ )
571
+ identifier = name[1:-1] # strip [] brackets
572
+ # pylint: disable=protected-access
573
+ field = message.Extensions._FindExtensionByName(identifier)
574
+ # pylint: enable=protected-access
575
+ if not field:
576
+ # Try looking for extension by the message type name, dropping the
577
+ # field name following the final . separator in full_name.
578
+ identifier = '.'.join(identifier.split('.')[:-1])
579
+ # pylint: disable=protected-access
580
+ field = message.Extensions._FindExtensionByName(identifier)
581
+ # pylint: enable=protected-access
582
+ if not field:
583
+ if self.ignore_unknown_fields:
584
+ continue
585
+ raise ParseError(
586
+ (
587
+ 'Message type "{0}" has no field named "{1}" at "{2}".\n'
588
+ ' Available Fields(except extensions): "{3}"'
589
+ ).format(
590
+ message_descriptor.full_name,
591
+ name,
592
+ path,
593
+ [f.json_name for f in message_descriptor.fields],
594
+ )
595
+ )
596
+ if name in names:
597
+ raise ParseError(
598
+ 'Message type "{0}" should not have multiple '
599
+ '"{1}" fields at "{2}".'.format(
600
+ message.DESCRIPTOR.full_name, name, path
601
+ )
602
+ )
603
+ names.append(name)
604
+ value = js[name]
605
+ # Check no other oneof field is parsed.
606
+ if field.containing_oneof is not None and value is not None:
607
+ oneof_name = field.containing_oneof.name
608
+ if oneof_name in names:
609
+ raise ParseError(
610
+ 'Message type "{0}" should not have multiple '
611
+ '"{1}" oneof fields at "{2}".'.format(
612
+ message.DESCRIPTOR.full_name, oneof_name, path
613
+ )
614
+ )
615
+ names.append(oneof_name)
616
+
617
+ if value is None:
618
+ if (
619
+ field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
620
+ and field.message_type.full_name == 'google.protobuf.Value'
621
+ ):
622
+ sub_message = getattr(message, field.name)
623
+ sub_message.null_value = 0
624
+ elif (
625
+ field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
626
+ and field.enum_type.full_name == 'google.protobuf.NullValue'
627
+ ):
628
+ setattr(message, field.name, 0)
629
+ else:
630
+ message.ClearField(field.name)
631
+ continue
632
+
633
+ # Parse field value.
634
+ if _IsMapEntry(field):
635
+ message.ClearField(field.name)
636
+ self._ConvertMapFieldValue(
637
+ value, message, field, '{0}.{1}'.format(path, name)
638
+ )
639
+ elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
640
+ message.ClearField(field.name)
641
+ if not isinstance(value, list):
642
+ raise ParseError(
643
+ 'repeated field {0} must be in [] which is {1} at {2}'.format(
644
+ name, value, path
645
+ )
646
+ )
647
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
648
+ # Repeated message field.
649
+ for index, item in enumerate(value):
650
+ sub_message = getattr(message, field.name).add()
651
+ # None is a null_value in Value.
652
+ if (
653
+ item is None
654
+ and sub_message.DESCRIPTOR.full_name
655
+ != 'google.protobuf.Value'
656
+ ):
657
+ raise ParseError(
658
+ 'null is not allowed to be used as an element'
659
+ ' in a repeated field at {0}.{1}[{2}]'.format(
660
+ path, name, index
661
+ )
662
+ )
663
+ self.ConvertMessage(
664
+ item, sub_message, '{0}.{1}[{2}]'.format(path, name, index)
665
+ )
666
+ else:
667
+ # Repeated scalar field.
668
+ for index, item in enumerate(value):
669
+ if item is None:
670
+ raise ParseError(
671
+ 'null is not allowed to be used as an element'
672
+ ' in a repeated field at {0}.{1}[{2}]'.format(
673
+ path, name, index
674
+ )
675
+ )
676
+ self._ConvertAndAppendScalar(
677
+ message, field, item, '{0}.{1}[{2}]'.format(path, name, index))
678
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
679
+ if field.is_extension:
680
+ sub_message = message.Extensions[field]
681
+ else:
682
+ sub_message = getattr(message, field.name)
683
+ sub_message.SetInParent()
684
+ self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name))
685
+ else:
686
+ if field.is_extension:
687
+ self._ConvertAndSetScalarExtension(message, field, value, '{0}.{1}'.format(path, name))
688
+ else:
689
+ self._ConvertAndSetScalar(message, field, value, '{0}.{1}'.format(path, name))
690
+ except ParseError as e:
691
+ if field and field.containing_oneof is None:
692
+ raise ParseError(
693
+ 'Failed to parse {0} field: {1}.'.format(name, e)
694
+ ) from e
695
+ else:
696
+ raise ParseError(str(e)) from e
697
+ except ValueError as e:
698
+ raise ParseError(
699
+ 'Failed to parse {0} field: {1}.'.format(name, e)
700
+ ) from e
701
+ except TypeError as e:
702
+ raise ParseError(
703
+ 'Failed to parse {0} field: {1}.'.format(name, e)
704
+ ) from e
705
+
706
+ def _ConvertAnyMessage(self, value, message, path):
707
+ """Convert a JSON representation into Any message."""
708
+ if isinstance(value, dict) and not value:
709
+ return
710
+ try:
711
+ type_url = value['@type']
712
+ except KeyError as e:
713
+ raise ParseError(
714
+ '@type is missing when parsing any message at {0}'.format(path)
715
+ ) from e
716
+
717
+ try:
718
+ sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
719
+ except TypeError as e:
720
+ raise ParseError('{0} at {1}'.format(e, path)) from e
721
+ message_descriptor = sub_message.DESCRIPTOR
722
+ full_name = message_descriptor.full_name
723
+ if _IsWrapperMessage(message_descriptor):
724
+ self._ConvertWrapperMessage(
725
+ value['value'], sub_message, '{0}.value'.format(path)
726
+ )
727
+ elif full_name in _WKTJSONMETHODS:
728
+ methodcaller(
729
+ _WKTJSONMETHODS[full_name][1],
730
+ value['value'],
731
+ sub_message,
732
+ '{0}.value'.format(path),
733
+ )(self)
734
+ else:
735
+ del value['@type']
736
+ self._ConvertFieldValuePair(value, sub_message, path)
737
+ value['@type'] = type_url
738
+ # Sets Any message
739
+ message.value = sub_message.SerializeToString()
740
+ message.type_url = type_url
741
+
742
+ def _ConvertGenericMessage(self, value, message, path):
743
+ """Convert a JSON representation into message with FromJsonString."""
744
+ # Duration, Timestamp, FieldMask have a FromJsonString method to do the
745
+ # conversion. Users can also call the method directly.
746
+ try:
747
+ message.FromJsonString(value)
748
+ except ValueError as e:
749
+ raise ParseError('{0} at {1}'.format(e, path)) from e
750
+
751
+ def _ConvertValueMessage(self, value, message, path):
752
+ """Convert a JSON representation into Value message."""
753
+ if isinstance(value, dict):
754
+ self._ConvertStructMessage(value, message.struct_value, path)
755
+ elif isinstance(value, list):
756
+ self._ConvertListValueMessage(value, message.list_value, path)
757
+ elif value is None:
758
+ message.null_value = 0
759
+ elif isinstance(value, bool):
760
+ message.bool_value = value
761
+ elif isinstance(value, str):
762
+ message.string_value = value
763
+ elif isinstance(value, _INT_OR_FLOAT):
764
+ message.number_value = value
765
+ else:
766
+ raise ParseError(
767
+ 'Value {0} has unexpected type {1} at {2}'.format(
768
+ value, type(value), path
769
+ )
770
+ )
771
+
772
+ def _ConvertListValueMessage(self, value, message, path):
773
+ """Convert a JSON representation into ListValue message."""
774
+ if not isinstance(value, list):
775
+ raise ParseError(
776
+ 'ListValue must be in [] which is {0} at {1}'.format(value, path)
777
+ )
778
+ message.ClearField('values')
779
+ for index, item in enumerate(value):
780
+ self._ConvertValueMessage(
781
+ item, message.values.add(), '{0}[{1}]'.format(path, index)
782
+ )
783
+
784
+ def _ConvertStructMessage(self, value, message, path):
785
+ """Convert a JSON representation into Struct message."""
786
+ if not isinstance(value, dict):
787
+ raise ParseError(
788
+ 'Struct must be in a dict which is {0} at {1}'.format(value, path)
789
+ )
790
+ # Clear will mark the struct as modified so it will be created even if
791
+ # there are no values.
792
+ message.Clear()
793
+ for key in value:
794
+ self._ConvertValueMessage(
795
+ value[key], message.fields[key], '{0}.{1}'.format(path, key)
796
+ )
797
+ return
798
+
799
+ def _ConvertWrapperMessage(self, value, message, path):
800
+ """Convert a JSON representation into Wrapper message."""
801
+ field = message.DESCRIPTOR.fields_by_name['value']
802
+ self._ConvertAndSetScalar(message, field, value, path='{0}.value'.format(path))
803
+
804
+ def _ConvertMapFieldValue(self, value, message, field, path):
805
+ """Convert map field value for a message map field.
806
+
807
+ Args:
808
+ value: A JSON object to convert the map field value.
809
+ message: A protocol message to record the converted data.
810
+ field: The descriptor of the map field to be converted.
811
+ path: parent path to log parse error info.
812
+
813
+ Raises:
814
+ ParseError: In case of convert problems.
815
+ """
816
+ if not isinstance(value, dict):
817
+ raise ParseError(
818
+ 'Map field {0} must be in a dict which is {1} at {2}'.format(
819
+ field.name, value, path
820
+ )
821
+ )
822
+ key_field = field.message_type.fields_by_name['key']
823
+ value_field = field.message_type.fields_by_name['value']
824
+ for key in value:
825
+ key_value = _ConvertScalarFieldValue(
826
+ key, key_field, '{0}.key'.format(path), True
827
+ )
828
+ if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
829
+ self.ConvertMessage(
830
+ value[key],
831
+ getattr(message, field.name)[key_value],
832
+ '{0}[{1}]'.format(path, key_value),
833
+ )
834
+ else:
835
+ self._ConvertAndSetScalarToMapKey(
836
+ message,
837
+ field,
838
+ key_value,
839
+ value[key],
840
+ path='{0}[{1}]'.format(path, key_value))
841
+
842
+ def _ConvertAndSetScalarExtension(self, message, extension_field, js_value, path):
843
+ """Convert scalar from js_value and assign it to message.Extensions[extension_field]."""
844
+ try:
845
+ message.Extensions[extension_field] = _ConvertScalarFieldValue(
846
+ js_value, extension_field, path)
847
+ except EnumStringValueParseError:
848
+ if not self.ignore_unknown_fields:
849
+ raise
850
+
851
+ def _ConvertAndSetScalar(self, message, field, js_value, path):
852
+ """Convert scalar from js_value and assign it to message.field."""
853
+ try:
854
+ setattr(
855
+ message,
856
+ field.name,
857
+ _ConvertScalarFieldValue(js_value, field, path))
858
+ except EnumStringValueParseError:
859
+ if not self.ignore_unknown_fields:
860
+ raise
861
+
862
+ def _ConvertAndAppendScalar(self, message, repeated_field, js_value, path):
863
+ """Convert scalar from js_value and append it to message.repeated_field."""
864
+ try:
865
+ getattr(message, repeated_field.name).append(
866
+ _ConvertScalarFieldValue(js_value, repeated_field, path))
867
+ except EnumStringValueParseError:
868
+ if not self.ignore_unknown_fields:
869
+ raise
870
+
871
+ def _ConvertAndSetScalarToMapKey(self, message, map_field, converted_key, js_value, path):
872
+ """Convert scalar from 'js_value' and add it to message.map_field[converted_key]."""
873
+ try:
874
+ getattr(message, map_field.name)[converted_key] = _ConvertScalarFieldValue(
875
+ js_value, map_field.message_type.fields_by_name['value'], path,
876
+ )
877
+ except EnumStringValueParseError:
878
+ if not self.ignore_unknown_fields:
879
+ raise
880
+
881
+
882
+ def _ConvertScalarFieldValue(value, field, path, require_str=False):
883
+ """Convert a single scalar field value.
884
+
885
+ Args:
886
+ value: A scalar value to convert the scalar field value.
887
+ field: The descriptor of the field to convert.
888
+ path: parent path to log parse error info.
889
+ require_str: If True, the field value must be a str.
890
+
891
+ Returns:
892
+ The converted scalar field value
893
+
894
+ Raises:
895
+ ParseError: In case of convert problems.
896
+ EnumStringValueParseError: In case of unknown enum string value.
897
+ """
898
+ try:
899
+ if field.cpp_type in _INT_TYPES:
900
+ return _ConvertInteger(value)
901
+ elif field.cpp_type in _FLOAT_TYPES:
902
+ return _ConvertFloat(value, field)
903
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
904
+ return _ConvertBool(value, require_str)
905
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
906
+ if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
907
+ if isinstance(value, str):
908
+ encoded = value.encode('utf-8')
909
+ else:
910
+ encoded = value
911
+ # Add extra padding '='
912
+ padded_value = encoded + b'=' * (4 - len(encoded) % 4)
913
+ return base64.urlsafe_b64decode(padded_value)
914
+ else:
915
+ # Checking for unpaired surrogates appears to be unreliable,
916
+ # depending on the specific Python version, so we check manually.
917
+ if _UNPAIRED_SURROGATE_PATTERN.search(value):
918
+ raise ParseError('Unpaired surrogate')
919
+ return value
920
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
921
+ # Convert an enum value.
922
+ enum_value = field.enum_type.values_by_name.get(value, None)
923
+ if enum_value is None:
924
+ try:
925
+ number = int(value)
926
+ enum_value = field.enum_type.values_by_number.get(number, None)
927
+ except ValueError as e:
928
+ # Since parsing to integer failed and lookup in values_by_name didn't
929
+ # find this name, we have an enum string value which is unknown.
930
+ raise EnumStringValueParseError(
931
+ 'Invalid enum value {0} for enum type {1}'.format(
932
+ value, field.enum_type.full_name
933
+ )
934
+ ) from e
935
+ if enum_value is None:
936
+ if field.enum_type.is_closed:
937
+ raise ParseError(
938
+ 'Invalid enum value {0} for enum type {1}'.format(
939
+ value, field.enum_type.full_name
940
+ )
941
+ )
942
+ else:
943
+ return number
944
+ return enum_value.number
945
+ except EnumStringValueParseError as e:
946
+ raise EnumStringValueParseError('{0} at {1}'.format(e, path)) from e
947
+ except ParseError as e:
948
+ raise ParseError('{0} at {1}'.format(e, path)) from e
949
+
950
+
951
+ def _ConvertInteger(value):
952
+ """Convert an integer.
953
+
954
+ Args:
955
+ value: A scalar value to convert.
956
+
957
+ Returns:
958
+ The integer value.
959
+
960
+ Raises:
961
+ ParseError: If an integer couldn't be consumed.
962
+ """
963
+ if isinstance(value, float) and not value.is_integer():
964
+ raise ParseError("Couldn't parse integer: {0}".format(value))
965
+
966
+ if isinstance(value, str) and value.find(' ') != -1:
967
+ raise ParseError('Couldn\'t parse integer: "{0}"'.format(value))
968
+
969
+ if isinstance(value, bool):
970
+ raise ParseError(
971
+ 'Bool value {0} is not acceptable for integer field'.format(value)
972
+ )
973
+
974
+ return int(value)
975
+
976
+
977
+ def _ConvertFloat(value, field):
978
+ """Convert an floating point number."""
979
+ if isinstance(value, float):
980
+ if math.isnan(value):
981
+ raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead')
982
+ if math.isinf(value):
983
+ if value > 0:
984
+ raise ParseError(
985
+ "Couldn't parse Infinity or value too large, "
986
+ 'use quoted "Infinity" instead'
987
+ )
988
+ else:
989
+ raise ParseError(
990
+ "Couldn't parse -Infinity or value too small, "
991
+ 'use quoted "-Infinity" instead'
992
+ )
993
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
994
+ # pylint: disable=protected-access
995
+ if value > type_checkers._FLOAT_MAX:
996
+ raise ParseError('Float value too large')
997
+ # pylint: disable=protected-access
998
+ if value < type_checkers._FLOAT_MIN:
999
+ raise ParseError('Float value too small')
1000
+ if value == 'nan':
1001
+ raise ParseError('Couldn\'t parse float "nan", use "NaN" instead')
1002
+ try:
1003
+ # Assume Python compatible syntax.
1004
+ return float(value)
1005
+ except ValueError as e:
1006
+ # Check alternative spellings.
1007
+ if value == _NEG_INFINITY:
1008
+ return float('-inf')
1009
+ elif value == _INFINITY:
1010
+ return float('inf')
1011
+ elif value == _NAN:
1012
+ return float('nan')
1013
+ else:
1014
+ raise ParseError("Couldn't parse float: {0}".format(value)) from e
1015
+
1016
+
1017
+ def _ConvertBool(value, require_str):
1018
+ """Convert a boolean value.
1019
+
1020
+ Args:
1021
+ value: A scalar value to convert.
1022
+ require_str: If True, value must be a str.
1023
+
1024
+ Returns:
1025
+ The bool parsed.
1026
+
1027
+ Raises:
1028
+ ParseError: If a boolean value couldn't be consumed.
1029
+ """
1030
+ if require_str:
1031
+ if value == 'true':
1032
+ return True
1033
+ elif value == 'false':
1034
+ return False
1035
+ else:
1036
+ raise ParseError('Expected "true" or "false", not {0}'.format(value))
1037
+
1038
+ if not isinstance(value, bool):
1039
+ raise ParseError('Expected true or false without quotes')
1040
+ return value
1041
+
1042
+
1043
+ _WKTJSONMETHODS = {
1044
+ 'google.protobuf.Any': ['_AnyMessageToJsonObject', '_ConvertAnyMessage'],
1045
+ 'google.protobuf.Duration': [
1046
+ '_GenericMessageToJsonObject',
1047
+ '_ConvertGenericMessage',
1048
+ ],
1049
+ 'google.protobuf.FieldMask': [
1050
+ '_GenericMessageToJsonObject',
1051
+ '_ConvertGenericMessage',
1052
+ ],
1053
+ 'google.protobuf.ListValue': [
1054
+ '_ListValueMessageToJsonObject',
1055
+ '_ConvertListValueMessage',
1056
+ ],
1057
+ 'google.protobuf.Struct': [
1058
+ '_StructMessageToJsonObject',
1059
+ '_ConvertStructMessage',
1060
+ ],
1061
+ 'google.protobuf.Timestamp': [
1062
+ '_GenericMessageToJsonObject',
1063
+ '_ConvertGenericMessage',
1064
+ ],
1065
+ 'google.protobuf.Value': [
1066
+ '_ValueMessageToJsonObject',
1067
+ '_ConvertValueMessage',
1068
+ ],
1069
+ }
parrot/lib/python3.10/site-packages/google/protobuf/message.py ADDED
@@ -0,0 +1,422 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ # TODO: We should just make these methods all "pure-virtual" and move
9
+ # all implementation out, into reflection.py for now.
10
+
11
+
12
+ """Contains an abstract base class for protocol messages."""
13
+
14
+ __author__ = 'robinson@google.com (Will Robinson)'
15
+
16
class Error(Exception):
  """Base error type for this module."""
19
+
20
+
21
class DecodeError(Error):
  """Exception raised when deserializing messages."""
24
+
25
+
26
class EncodeError(Error):
  """Exception raised when serializing messages."""
29
+
30
+
31
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler. These generated types subclass Message and implement the methods
  shown below.
  """

  # TODO: Link to an HTML document here.

  # TODO: Document that instances of this class will also
  # have an Extensions attribute with __getitem__ and __setitem__.
  # Again, not sure how to best convey this.

  # TODO: Document these fields and methods.

  __slots__ = []

  #: The :class:`google.protobuf.Descriptor`
  # for this message type.
  DESCRIPTOR = None

  def __deepcopy__(self, memo=None):
    # A deep copy is a fresh instance with this message's contents merged in;
    # MergeFrom merges recursively, so `memo` is not consulted here.
    clone = type(self)()
    clone.MergeFrom(self)
    return clone

  def __eq__(self, other_msg):
    """Recursively compares two messages by value and structure."""
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __hash__(self):
    # Messages are mutable, so they are deliberately unhashable.
    raise TypeError('unhashable object')

  def __str__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def __unicode__(self):
    """Outputs a human-readable representation of the message."""
    raise NotImplementedError

  def __contains__(self, field_name_or_key):
    """Checks if a certain field is set for the message.

    Has presence fields return true if the field is set, false if the field is
    not set. Fields without presence do raise `ValueError` (this includes
    repeated fields, map fields, and implicit presence fields).

    If field_name is not defined in the message descriptor, `ValueError` will
    be raised.
    Note: WKT Struct checks if the key is contained in fields. ListValue checks
    if the item is contained in the list.

    Args:
      field_name_or_key: For Struct, the key (str) of the fields map. For
        ListValue, any type that may be contained in the list. For other
        messages, name of the field (str) to check for presence.

    Returns:
      bool: For Struct, whether the item is contained in fields. For ListValue,
        whether the item is contained in the list. For other message,
        whether a value has been set for the named field.

    Raises:
      ValueError: For normal messages, if the `field_name_or_key` is not a
        member of this message or `field_name_or_key` is not a string.
    """
    raise NotImplementedError

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg (Message): A message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg (Message): A message to copy into the current one.
    """
    # Copying from self would first Clear() and lose the data; no-op instead.
    if self is other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def SetInParent(self):
    """Mark this as present in the parent.

    This normally happens automatically when you assign a field of a
    sub-message, but sometimes you want to make the sub-message
    present while keeping it empty. If you find yourself using this,
    you may want to reconsider your design.
    """
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      bool: The method returns True if the message is initialized (i.e. all of
        its required fields are set).
    """
    raise NotImplementedError

  # TODO: MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read. Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in `serialized` that is already present
    in this message:

    - If it's a "repeated" field, we append to the end of our list.
    - Else, if it's a scalar, we overwrite our field.
    - Else, (it's a nonrepeated composite), we recursively merge
      into the existing composite.

    Args:
      serialized (bytes): Any object that allows us to call
        ``memoryview(serialized)`` to access a string of bytes using the
        buffer interface.

    Returns:
      int: The number of bytes read from `serialized`.
      For non-group messages, this will always be `len(serialized)`,
      but for messages which are actually groups, this will
      generally be less than `len(serialized)`, since we must
      stop when we reach an ``END_GROUP`` tag. Note that if
      we *do* stop because of an ``END_GROUP`` tag, the number
      of bytes returned does not include the bytes
      for the ``END_GROUP`` tag information.

    Raises:
      DecodeError: if the input cannot be parsed.
    """
    # TODO: Document handling of unknown fields.
    # TODO: When we switch to a helper, this will return None.
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Parse serialized protocol buffer data in binary form into this message.

    Like :func:`MergeFromString()`, except we clear the object first.

    Raises:
      message.DecodeError if the input cannot be parsed.
    """
    self.Clear()
    return self.MergeFromString(serialized)

  def SerializeToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
    """
    raise NotImplementedError

  def SerializePartialToString(self, **kwargs):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Keyword Args:
      deterministic (bool): If true, requests deterministic serialization
        of the protobuf, with predictable ordering of map keys.

    Returns:
      bytes: A serialized representation of the partial message.
    """
    raise NotImplementedError

  # TODO: Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves. This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO: Be sure to document (and test) exactly
  # which field names are accepted here. Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self):
    """Returns a list of (FieldDescriptor, value) tuples for present fields.

    A message field is non-empty if HasField() would return true. A singular
    primitive field is non-empty if HasField() would return true in proto2 or it
    is non zero in proto3. A repeated field is non-empty if it contains at least
    one element. The fields are ordered by field number.

    Returns:
      list[tuple(FieldDescriptor, value)]: field descriptors and values
        for all fields in the message which are not empty. The values vary by
        field type.
    """
    raise NotImplementedError

  def HasField(self, field_name):
    """Checks if a certain field is set for the message.

    For a oneof group, checks if any field inside is set. Note that if the
    field_name is not defined in the message descriptor, :exc:`ValueError` will
    be raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Returns:
      bool: Whether a value has been set for the named field.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def ClearField(self, field_name):
    """Clears the contents of a given field.

    Inside a oneof group, clears the field set. If the name neither refers to a
    defined field or oneof group, :exc:`ValueError` is raised.

    Args:
      field_name (str): The name of the field to check for presence.

    Raises:
      ValueError: if the `field_name` is not a member of this message.
    """
    raise NotImplementedError

  def WhichOneof(self, oneof_group):
    """Returns the name of the field that is set inside a oneof group.

    If no field is set, returns None.

    Args:
      oneof_group (str): the name of the oneof group to check.

    Returns:
      str or None: The name of the group that is set, or None.

    Raises:
      ValueError: no group with the given name exists
    """
    raise NotImplementedError

  def HasExtension(self, field_descriptor):
    """Checks if a certain extension is present for this message.

    Extensions are retrieved using the :attr:`Extensions` mapping (if present).

    Args:
      field_descriptor: The field descriptor for the extension to check.

    Returns:
      bool: Whether the extension is present for this message.

    Raises:
      KeyError: if the extension is repeated. Similar to repeated fields,
        there is no separate notion of presence: a "not present" repeated
        extension is an empty list.
    """
    raise NotImplementedError

  def ClearExtension(self, field_descriptor):
    """Clears the contents of a given extension.

    Args:
      field_descriptor: The field descriptor for the extension to clear.
    """
    raise NotImplementedError

  def UnknownFields(self):
    """Returns the UnknownFieldSet.

    Returns:
      UnknownFieldSet: The unknown fields stored in this message.
    """
    raise NotImplementedError

  def DiscardUnknownFields(self):
    """Clears all fields in the :class:`UnknownFieldSet`.

    This operation is recursive for nested message.
    """
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.

    Recursively calls ByteSize() on all contained messages.

    Returns:
      int: The number of bytes required to serialize this message.
    """
    raise NotImplementedError

  @classmethod
  def FromString(cls, s):
    # NOTE(review): alternate constructor implemented by generated
    # subclasses; presumably parses `s` into a new message — confirm against
    # the concrete implementations.
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener. Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError

  def __getstate__(self):
    """Support the pickle protocol."""
    # Partial serialization so uninitialized messages can still be pickled.
    return dict(serialized=self.SerializePartialToString())

  def __setstate__(self, state):
    """Support the pickle protocol."""
    self.__init__()
    serialized = state['serialized']
    # On Python 3, using encoding='latin1' is required for unpickling
    # protos pickled by Python 2.
    if not isinstance(serialized, bytes):
      serialized = serialized.encode('latin1')
    self.ParseFromString(serialized)

  def __reduce__(self):
    message_descriptor = self.DESCRIPTOR
    if message_descriptor.containing_type is None:
      return type(self), (), self.__getstate__()
    # the message type must be nested.
    # Python does not pickle nested classes; use the symbol_database on the
    # receiving end.
    container = message_descriptor
    return (_InternalConstructMessage, (container.full_name,),
            self.__getstate__())
416
+
417
+
418
def _InternalConstructMessage(full_name):
  """Constructs a nested message.

  Used by Message.__reduce__ as the pickle reconstruction callable for
  nested message types, which cannot be pickled by reference.
  """
  # Imported lazily to avoid a circular import at module load time.
  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top

  message_class = symbol_database.Default().GetSymbol(full_name)
  return message_class()
parrot/lib/python3.10/site-packages/google/protobuf/message_factory.py ADDED
@@ -0,0 +1,237 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Provides a factory class for generating dynamic messages.
9
+
10
+ The easiest way to use this class is if you have access to the FileDescriptor
11
+ protos containing the messages you want to create you can just do the following:
12
+
13
+ message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
14
+ my_proto_instance = message_classes['some.proto.package.MessageName']()
15
+ """
16
+
17
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
18
+
19
+ import warnings
20
+
21
+ from google.protobuf import descriptor_pool
22
+ from google.protobuf import message
23
+ from google.protobuf.internal import api_implementation
24
+
25
+ if api_implementation.Type() == 'python':
26
+ from google.protobuf.internal import python_message as message_impl
27
+ else:
28
+ from google.protobuf.pyext import cpp_message as message_impl # pylint: disable=g-import-not-at-top
29
+
30
+
31
+ # The type of all Message classes.
32
+ _GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
33
+
34
+
35
def GetMessageClass(descriptor):
  """Obtains a proto2 message class based on the passed in descriptor.

  Passing a descriptor with a fully qualified name matching a previous
  invocation will cause the same class to be returned.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  # A descriptor that already carries a generated class is returned as-is;
  # otherwise a new class is built dynamically (and cached on the descriptor
  # by the metaclass).
  existing = getattr(descriptor, '_concrete_class', None)
  return existing if existing else _InternalCreateMessageClass(descriptor)
51
+
52
+
53
def GetMessageClassesForFiles(files, pool):
  """Gets all the messages from specified files.

  This will find and resolve dependencies, failing if the descriptor
  pool cannot satisfy them.

  Args:
    files: The file names to extract messages from.
    pool: The descriptor pool to find the files including the dependent files.

  Returns:
    A dictionary mapping proto names to the message classes.
  """
  classes_by_name = {}
  for file_name in files:
    file_descriptor = pool.FindFileByName(file_name)
    for message_descriptor in file_descriptor.message_types_by_name.values():
      classes_by_name[message_descriptor.full_name] = GetMessageClass(
          message_descriptor)

    # While the extension FieldDescriptors are created by the descriptor pool,
    # the python classes created in the factory need them to be registered
    # explicitly, which is done below.
    #
    # The call to RegisterExtension will specifically check if the
    # extension was already registered on the object and either
    # ignore the registration if the original was the same, or raise
    # an error if they were different.
    for extension in file_descriptor.extensions_by_name.values():
      _ = GetMessageClass(extension.containing_type)
      if api_implementation.Type() != 'python':
        # TODO: Remove this check here. Duplicate extension
        # register check should be in descriptor_pool.
        registered = pool.FindExtensionByNumber(
            extension.containing_type, extension.number)
        if extension is not registered:
          raise ValueError('Double registration of Extensions')
      # Recursively load protos for extension field, in order to be able to
      # fully represent the extension. This matches the behavior for regular
      # fields too.
      if extension.message_type:
        GetMessageClass(extension.message_type)
  return classes_by_name
96
+
97
+
98
def _InternalCreateMessageClass(descriptor):
  """Builds a proto2 message class based on the passed in descriptor.

  Args:
    descriptor: The descriptor to build from.

  Returns:
    A class describing the passed in descriptor.
  """
  new_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
      descriptor.name,
      (message.Message,),
      {
          'DESCRIPTOR': descriptor,
          # If module not set, it wrongly points to message_factory module.
          '__module__': None,
      },
  )
  # Eagerly build classes for message-typed fields so the new class can fully
  # represent its sub-messages.
  for field_descriptor in descriptor.fields:
    if field_descriptor.message_type:
      GetMessageClass(field_descriptor.message_type)

  for extension in new_class.DESCRIPTOR.extensions:
    _ = GetMessageClass(extension.containing_type)
    if api_implementation.Type() != 'python':
      # TODO: Remove this check here. Duplicate extension
      # register check should be in descriptor_pool.
      owning_pool = extension.containing_type.file.pool
      if extension is not owning_pool.FindExtensionByNumber(
          extension.containing_type, extension.number
      ):
        raise ValueError('Double registration of Extensions')
    if extension.message_type:
      GetMessageClass(extension.message_type)
  return new_class
134
+
135
+
136
+ # Deprecated. Please use GetMessageClass() or GetMessageClassesForFiles()
137
+ # method above instead.
138
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self, pool=None):
    """Initializes a new factory."""
    # Falls back to a fresh private pool when none is supplied.
    self.pool = pool or descriptor_pool.DescriptorPool()

  def GetPrototype(self, descriptor):
    """Obtains a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    warnings.warn(
        'MessageFactory class is deprecated. Please use '
        'GetMessageClass() instead of MessageFactory.GetPrototype. '
        'MessageFactory class will be removed after 2024.',
        stacklevel=2,
    )
    # Deprecated thin wrapper: delegates to the module-level function.
    return GetMessageClass(descriptor)

  def CreatePrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Don't call this function directly, it always creates a new class. Call
    GetMessageClass() instead.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    warnings.warn(
        'Directly call CreatePrototype is wrong. Please use '
        'GetMessageClass() method instead. Directly use '
        'CreatePrototype will raise error after July 2023.',
        stacklevel=2,
    )
    # Bypasses the _concrete_class cache on purpose (always builds anew).
    return _InternalCreateMessageClass(descriptor)

  def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if the descriptor
    pool cannot satisfy them.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will include
      any dependent messages as well as any messages defined in the same file as
      a specified message.
    """
    warnings.warn(
        'MessageFactory class is deprecated. Please use '
        'GetMessageClassesForFiles() instead of '
        'MessageFactory.GetMessages(). MessageFactory class '
        'will be removed after 2024.',
        stacklevel=2,
    )
    # Deprecated thin wrapper: delegates to the module-level function.
    return GetMessageClassesForFiles(files, self.pool)
207
+
208
+
209
def GetMessages(file_protos, pool=None):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: Iterable of FileDescriptorProto to build messages out of.
    pool: The descriptor pool to add the file protos.

  Returns:
    A dictionary mapping proto names to the message classes. This will include
    any dependent messages as well as any messages defined in the same file as
    a specified message.
  """
  # The cpp implementation of the protocol buffer library requires to add the
  # message in topological order of the dependency graph.
  target_pool = pool or descriptor_pool.DescriptorPool()
  pending = {file_proto.name: file_proto for file_proto in file_protos}

  def _AddInDependencyOrder(file_proto):
    # Depth-first over dependencies; popping from `pending` both marks a file
    # as visited and cuts dependency cycles.
    for dependency in file_proto.dependency:
      if dependency in pending:
        _AddInDependencyOrder(pending.pop(dependency))
    target_pool.Add(file_proto)

  while pending:
    _AddInDependencyOrder(pending.popitem()[1])
  return GetMessageClassesForFiles(
      [file_proto.name for file_proto in file_protos], target_pool)
parrot/lib/python3.10/site-packages/google/protobuf/proto_builder.py ADDED
@@ -0,0 +1,111 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Dynamic Protobuf class creator."""
9
+
10
+ from collections import OrderedDict
11
+ import hashlib
12
+ import os
13
+
14
+ from google.protobuf import descriptor_pb2
15
+ from google.protobuf import descriptor
16
+ from google.protobuf import descriptor_pool
17
+ from google.protobuf import message_factory
18
+
19
+
20
def _GetMessageFromFactory(pool, full_name):
  """Get a proto class from the MessageFactory by name.

  Args:
    pool: a descriptor pool.
    full_name: str, the fully qualified name of the proto type.
  Returns:
    A class, for the type identified by full_name.
  Raises:
    KeyError, if the proto is not found in the factory's descriptor pool.
  """
  # Resolve the descriptor first (raises KeyError if unknown), then turn it
  # into (or fetch the cached) generated message class.
  return message_factory.GetMessageClass(
      pool.FindMessageTypeByName(full_name))
34
+
35
+
36
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
  """Create a Protobuf class whose fields are basic types.

  Note: this doesn't validate field names!

  Args:
    fields: dict of {name: field_type} mappings for each field in the proto. If
        this is an OrderedDict the order will be maintained, otherwise the
        fields will be sorted by name.
    full_name: optional str, the fully-qualified name of the proto type.
    pool: optional DescriptorPool instance.
  Returns:
    a class, the new protobuf class with a FileDescriptor.
  """
  target_pool = pool or descriptor_pool.DescriptorPool()
  if full_name is not None:
    try:
      return _GetMessageFromFactory(target_pool, full_name)
    except KeyError:
      # The factory's DescriptorPool doesn't know about this class yet.
      pass

  # Get a (name, field_type) sequence from the fields dict: keep insertion
  # order for an OrderedDict, otherwise sort by name for determinism.
  field_items = fields.items()
  if not isinstance(fields, OrderedDict):
    field_items = sorted(field_items)

  # Hash the field names and types into a file name that is stable for the
  # same schema and unlikely to conflict with any imported proto files.
  fields_digest = hashlib.sha1()
  for field_name, field_type in field_items:
    fields_digest.update(field_name.encode('utf-8'))
    fields_digest.update(str(field_type).encode('utf-8'))
  proto_file_name = fields_digest.hexdigest() + '.proto'

  # If the proto is anonymous, use the same hash to name it.
  if full_name is None:
    full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
                 fields_digest.hexdigest())
    try:
      return _GetMessageFromFactory(target_pool, full_name)
    except KeyError:
      # The factory's DescriptorPool doesn't know about this class yet.
      pass

  # This is the first time we see this proto: add a new descriptor to the pool.
  target_pool.Add(
      _MakeFileDescriptorProto(proto_file_name, full_name, field_items))
  return _GetMessageFromFactory(target_pool, full_name)
89
+
90
+
91
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
  """Populate FileDescriptorProto for MessageFactory's DescriptorPool.

  Args:
    proto_file_name: str, the synthetic .proto file name.
    full_name: str, fully-qualified (package-dotted) name of the message.
    field_items: iterable of (field_name, field_type) pairs, in field order.

  Returns:
    A FileDescriptorProto describing one message with the given fields.
  """
  package, message_name = full_name.rsplit('.', 1)
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
  file_proto.package = package
  message_proto = file_proto.message_type.add()
  message_proto.name = message_name
  # Hoist the reserved-range bounds out of the loop.
  first_reserved = descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER
  last_reserved = descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER
  for number, (field_name, field_type) in enumerate(field_items, 1):
    field_proto = message_proto.field.add()
    field_proto.name = field_name
    # If the number falls in the reserved range, reassign it to the
    # corresponding number after the range.
    if number >= first_reserved:
      number += last_reserved - first_reserved + 1
    field_proto.number = number
    field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
    field_proto.type = field_type
  return file_proto
parrot/lib/python3.10/site-packages/google/protobuf/proto_json.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Contains the Nextgen Pythonic Protobuf JSON APIs."""
9
+
10
+ from typing import Optional, Type
11
+
12
+ from google.protobuf.message import Message
13
+ from google.protobuf.descriptor_pool import DescriptorPool
14
+ from google.protobuf import json_format
15
+
16
def serialize(
    message: Message,
    always_print_fields_with_no_presence: bool=False,
    preserving_proto_field_name: bool=False,
    use_integers_for_enums: bool=False,
    descriptor_pool: Optional[DescriptorPool]=None,
    float_precision: Optional[int]=None,
) -> dict:
  """Converts protobuf message to a dictionary.

  When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.

  Args:
    message: The protocol buffers message instance to serialize.
    always_print_fields_with_no_presence: If True, fields without
      presence (implicit presence scalars, repeated fields, and map fields) will
      always be serialized. Any field that supports presence is not affected by
      this option (including singular message fields and oneof fields).
    preserving_proto_field_name: If True, use the original proto field names as
      defined in the .proto file. If False, convert the field names to
      lowerCamelCase.
    use_integers_for_enums: If true, print integers instead of enum names.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    float_precision: If set, use this to specify float field valid digits.

  Returns:
    A dict representation of the protocol buffer message.
  """
  return json_format.MessageToDict(
      message,
      always_print_fields_with_no_presence=always_print_fields_with_no_presence,
      preserving_proto_field_name=preserving_proto_field_name,
      use_integers_for_enums=use_integers_for_enums,
      # Bug fix: descriptor_pool was documented and accepted but never
      # forwarded, so callers' pools were silently ignored.
      descriptor_pool=descriptor_pool,
      float_precision=float_precision,
  )
52
+
53
def parse(
    message_class: Type[Message],
    js_dict: dict,
    ignore_unknown_fields: bool=False,
    descriptor_pool: Optional[DescriptorPool]=None,
    max_recursion_depth: int=100
) -> Message:
  """Parses a JSON dictionary representation into a message.

  Args:
    message_class: The message meta class.
    js_dict: Dict representation of a JSON message.
    ignore_unknown_fields: If True, do not raise errors for unknown fields.
    descriptor_pool: A Descriptor Pool for resolving types. If None use the
      default.
    max_recursion_depth: max recursion depth of JSON message to be deserialized.
      JSON messages over this depth will fail to be deserialized. Default value
      is 100.

  Returns:
    A new message passed from json_dict.
  """
  # Construct an empty message and let json_format fill it in from the dict.
  result = message_class()
  json_format.ParseDict(
      js_dict=js_dict,
      message=result,
      ignore_unknown_fields=ignore_unknown_fields,
      descriptor_pool=descriptor_pool,
      max_recursion_depth=max_recursion_depth,
  )
  return result
parrot/lib/python3.10/site-packages/google/protobuf/pyext/__init__.py ADDED
File without changes
parrot/lib/python3.10/site-packages/google/protobuf/pyext/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (173 Bytes). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/pyext/__pycache__/cpp_message.cpython-310.pyc ADDED
Binary file (1.55 kB). View file
 
parrot/lib/python3.10/site-packages/google/protobuf/pyext/cpp_message.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Protocol message implementation hooks for C++ implementation.
9
+
10
+ Contains helper functions used to create protocol message classes from
11
+ Descriptor objects at runtime backed by the protocol buffer C++ API.
12
+ """
13
+
14
__author__ = 'tibell@google.com (Johan Tibell)'

from google.protobuf.internal import api_implementation


# Resolve the C++ extension module that backs message classes: prefer the
# module already loaded by api_implementation, fall back to a direct import.
# pylint: disable=protected-access
_message = api_implementation._c_module
# TODO: Remove this import after fix api_implementation
if _message is None:
  from google.protobuf.pyext import _message
24
+
25
+
26
class GeneratedProtocolMessageType(_message.MessageMeta):

  """Metaclass for protocol message classes created at runtime from Descriptors.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime. Clients can also manually create their own
  classes at runtime, as in this example:

    mydescriptor = Descriptor(.....)
    factory = symbol_database.Default()
    factory.pool.AddDescriptor(mydescriptor)
    MyProtoClass = factory.GetPrototype(mydescriptor)
    myproto_instance = MyProtoClass()
    myproto_instance.foo_field = 23
    ...

  The above example will not work for nested types. If you wish to include them,
  use reflection.MakeClass() instead of manually instantiating the class in
  order to create the appropriate class structure.
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  # Class-dict key under which the compiler stores the message Descriptor.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'
parrot/lib/python3.10/site-packages/google/protobuf/runtime_version.py ADDED
@@ -0,0 +1,123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """Protobuf Runtime versions and validators.
9
+
10
+ It should only be accessed by Protobuf gencodes and tests. DO NOT USE it
11
+ elsewhere.
12
+ """
13
+
14
+ __author__ = 'shaod@google.com (Dennis Shao)'
15
+
16
+ from enum import Enum
17
+ import os
18
+ import warnings
19
+
20
+
21
class Domain(Enum):
  # Origin of a gencode/runtime pair; ValidateProtobufRuntimeVersion rejects
  # any cross-domain combination.
  GOOGLE_INTERNAL = 1
  PUBLIC = 2
24
+
25
+
26
# The versions of this Python Protobuf runtime to be changed automatically by
# the Protobuf release process. Do not edit them manually.
# These OSS versions are not stripped to avoid merging conflicts.
OSS_DOMAIN = Domain.PUBLIC
OSS_MAJOR = 5
OSS_MINOR = 28
OSS_PATCH = 2
OSS_SUFFIX = ''

# The active runtime version used by the validation below; in the OSS build
# it simply aliases the OSS_* constants above.
DOMAIN = OSS_DOMAIN
MAJOR = OSS_MAJOR
MINOR = OSS_MINOR
PATCH = OSS_PATCH
SUFFIX = OSS_SUFFIX
40
+
41
+
42
+ class VersionError(Exception):
43
+ """Exception class for version violation."""
44
+
45
+
46
+ def _ReportVersionError(msg):
47
+ raise VersionError(msg)
48
+
49
+
50
def ValidateProtobufRuntimeVersion(
    gen_domain, gen_major, gen_minor, gen_patch, gen_suffix, location
):
  """Function to validate versions.

  Checks run in a fixed order: escape-hatch env var, sanity check on the
  gencode version numbers, domain match, major match (warn if exactly one
  major behind, else error), minor/patch ordering (error if runtime older,
  warn if gencode older), and finally suffix match.

  Args:
    gen_domain: The domain where the code was generated from.
    gen_major: The major version number of the gencode.
    gen_minor: The minor version number of the gencode.
    gen_patch: The patch version number of the gencode.
    gen_suffix: The version suffix e.g. '-dev', '-rc1' of the gencode.
    location: The proto location that causes the version violation.

  Raises:
    VersionError: if gencode version is invalid or incompatible with the
      runtime.
  """

  # NOTE(review): 'TEMORARILY' is misspelled, but this matches the env-var
  # name shipped by upstream protobuf — renaming it would break users who
  # already set it. Do not "fix" without coordinating upstream.
  disable_flag = os.getenv('TEMORARILY_DISABLE_PROTOBUF_VERSION_CHECK')
  if disable_flag is not None and disable_flag.lower() == 'true':
    return

  version = f'{MAJOR}.{MINOR}.{PATCH}{SUFFIX}'
  gen_version = f'{gen_major}.{gen_minor}.{gen_patch}{gen_suffix}'

  if gen_major < 0 or gen_minor < 0 or gen_patch < 0:
    raise VersionError(f'Invalid gencode version: {gen_version}')

  error_prompt = (
      'See Protobuf version guarantees at'
      ' https://protobuf.dev/support/cross-version-runtime-guarantee.'
  )

  # Cross-domain (internal vs. public) mixing is never allowed.
  if gen_domain != DOMAIN:
    _ReportVersionError(
        'Detected mismatched Protobuf Gencode/Runtime domains when loading'
        f' {location}: gencode {gen_domain.name} runtime {DOMAIN.name}.'
        ' Cross-domain usage of Protobuf is not supported.'
    )

  # Major versions must match; exactly-one-major-older gencode gets a grace
  # period with a warning, anything else is a hard error.
  if gen_major != MAJOR:
    if gen_major == MAJOR - 1:
      warnings.warn(
          'Protobuf gencode version %s is exactly one major version older than'
          ' the runtime version %s at %s. Please update the gencode to avoid'
          ' compatibility violations in the next runtime release.'
          % (gen_version, version, location)
      )
    else:
      _ReportVersionError(
          'Detected mismatched Protobuf Gencode/Runtime major versions when'
          f' loading {location}: gencode {gen_version} runtime {version}.'
          f' Same major version is required. {error_prompt}'
      )

  # The runtime must be at least as new as the gencode (minor, then patch);
  # an older checked-in gencode is tolerated with a warning.
  if MINOR < gen_minor or (MINOR == gen_minor and PATCH < gen_patch):
    _ReportVersionError(
        'Detected incompatible Protobuf Gencode/Runtime versions when loading'
        f' {location}: gencode {gen_version} runtime {version}. Runtime version'
        f' cannot be older than the linked gencode version. {error_prompt}'
    )
  elif MINOR > gen_minor or PATCH > gen_patch:
    warnings.warn(
        'Protobuf gencode version %s is older than the runtime version %s at'
        ' %s. Please avoid checked-in Protobuf gencode that can be obsolete.'
        % (gen_version, version, location)
    )

  # Pre-release suffixes ('-dev', '-rc1', ...) must match exactly.
  if gen_suffix != SUFFIX:
    _ReportVersionError(
        'Detected mismatched Protobuf Gencode/Runtime version suffixes when'
        f' loading {location}: gencode {gen_version} runtime {version}.'
        f' Version suffixes must be the same. {error_prompt}'
    )
parrot/lib/python3.10/site-packages/google/protobuf/service.py ADDED
@@ -0,0 +1,213 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """DEPRECATED: Declares the RPC service interfaces.
9
+
10
+ This module declares the abstract interfaces underlying proto2 RPC
11
+ services. These are intended to be independent of any particular RPC
12
+ implementation, so that proto2 services can be used on top of a variety
13
+ of implementations. Starting with version 2.3.0, RPC implementations should
14
+ not try to build on these, but should instead provide code generator plugins
15
+ which generate code specific to the particular RPC implementation. This way
16
+ the generated code can be more appropriate for the implementation in use
17
+ and can avoid unnecessary layers of indirection.
18
+ """
19
+
20
+ __author__ = 'petar@google.com (Petar Petrov)'
21
+
22
+ import warnings
23
+
24
# Emitted once at import time: this entire module is deprecated.
# stacklevel=2 attributes the warning to the importing module, not here.
warnings.warn(
    'google.protobuf.service module is deprecated. RPC implementations '
    'should provide code generator plugins which generate code specific to '
    'the RPC implementation. service.py will be removed in Jan 2025',
    stacklevel=2,
)
30
+
31
class RpcException(Exception):
  """Raised when a blocking RPC method call (done=None) fails."""
34
+
35
+
36
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Services themselves are abstract classes (implemented either by servers or as
  stubs), but they subclass this base interface. The methods of this
  interface can be used to call the methods of the service without knowing
  its exact type at compile time (analogous to the Message interface).
  """

  # NOTE(review): upstream declares this without ``self``, so it is only
  # callable directly on the class (``Service.GetDescriptor()``); instance
  # calls raise TypeError. Kept as-is to match the vendored upstream source.
  def GetDescriptor():
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Calls a method of the service specified by method_descriptor.

    If "done" is None then the call is blocking and the response
    message will be returned directly. Otherwise the call is asynchronous
    and "done" will later be called with the response value.

    In the blocking case, RpcException will be raised on error.

    Preconditions:

    * method_descriptor.service == GetDescriptor
    * request is of the exact same classes as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation being
      used by this Service. For stubs, the "correct type" depends on the
      RpcChannel which the stub is using.

    Postconditions:

    * "done" will be called when the method is complete. This may be
      before CallMethod() returns or it may be at some point in the future.
    * If the RPC failed, the response value passed to "done" will be None.
      Further details about the failure can be found by querying the
      RpcController.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires that the request is of a particular subclass of
    Message. GetRequestClass() gets the default instance of this required
    type.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    This method isn't really needed, as the RpcChannel's CallMethod constructs
    the response protocol message. It's provided anyway in case it is useful
    for the caller to know the response type in advance.
    """
    raise NotImplementedError
103
+
104
+
105
class RpcController(object):

  """An RpcController mediates a single method call.

  The primary purpose of the controller is to provide a way to manipulate
  settings specific to the RPC implementation and to find out about RPC-level
  errors. The methods provided by the RpcController interface are intended
  to be a "least common denominator" set of features which we expect all
  implementations to support. Specific implementations may provide more
  advanced features (e.g. deadline propagation).

  All methods here are abstract; concrete RPC implementations override them.
  """

  # Client-side methods below

  def Reset(self):
    """Resets the RpcController to its initial state.

    After the RpcController has been reset, it may be reused in
    a new call. Must not be called while an RPC is in progress.
    """
    raise NotImplementedError

  def Failed(self):
    """Returns true if the call failed.

    After a call has finished, returns true if the call failed. The possible
    reasons for failure depend on the RPC implementation. Failed() must not
    be called before a call has finished. If Failed() returns true, the
    contents of the response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """If Failed is true, returns a human-readable description of the error."""
    raise NotImplementedError

  def StartCancel(self):
    """Initiate cancellation.

    Advises the RPC system that the caller desires that the RPC call be
    canceled. The RPC system may cancel it immediately, may wait awhile and
    then cancel it, or may not even cancel the call at all. If the call is
    canceled, the "done" callback will still be called and the RpcController
    will indicate that the call failed at that time.
    """
    raise NotImplementedError

  # Server-side methods below

  def SetFailed(self, reason):
    """Sets a failure reason.

    Causes Failed() to return true on the client side. "reason" will be
    incorporated into the message returned by ErrorText(). If you find
    you need to return machine-readable information about failures, you
    should incorporate it into your response protocol buffer and should
    NOT call SetFailed().
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Checks if the client cancelled the RPC.

    If true, indicates that the client canceled the RPC, so the server may
    as well give up on replying to it. The server should still call the
    final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Sets a callback to invoke on cancel.

    Asks that the given callback be called when the RPC is canceled. The
    callback will always be called exactly once. If the RPC completes without
    being canceled, the callback will be called after completion. If the RPC
    has already been canceled when NotifyOnCancel() is called, the callback
    will be called immediately.

    NotifyOnCancel() must be called no more than once per request.
    """
    raise NotImplementedError
187
+
188
class RpcChannel(object):

  """Abstract interface for an RPC channel.

  An RpcChannel represents a communication line to a service which can be used
  to call that service's methods. The service may be running on another
  machine. Normally, you should not use an RpcChannel directly, but instead
  construct a stub {@link Service} wrapping it. Example:

  Example:
    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
    RpcController controller = rpcImpl.Controller()
    MyService service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Calls the method identified by the descriptor.

    Call the given method of the remote service. The signature of this
    procedure looks the same as Service.CallMethod(), but the requirements
    are less strict in one important way: the request object doesn't have to
    be of any specific class as long as its descriptor is method.input_type.
    """
    raise NotImplementedError
parrot/lib/python3.10/site-packages/google/protobuf/source_context_pb2.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/source_context.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fail fast at import if this gencode is incompatible with the loaded runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
  5,
  28,
  2,
  '',
  'google/protobuf/source_context.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register the serialized FileDescriptorProto with the default descriptor pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\",\n\rSourceContext\x12\x1b\n\tfile_name\x18\x01 \x01(\tR\x08\x66ileNameB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_SOURCECONTEXT']._serialized_start=57
  _globals['_SOURCECONTEXT']._serialized_end=101
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/struct_pb2.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# NO CHECKED-IN PROTOBUF GENCODE
# source: google/protobuf/struct.proto
# Protobuf Python Version: 5.28.2
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import runtime_version as _runtime_version
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# Fail fast at import if this gencode is incompatible with the loaded runtime.
_runtime_version.ValidateProtobufRuntimeVersion(
  _runtime_version.Domain.PUBLIC,
  5,
  28,
  2,
  '',
  'google/protobuf/struct.proto'
)
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




# Register the serialized FileDescriptorProto with the default descriptor pool.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x98\x01\n\x06Struct\x12;\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntryR\x06\x66ields\x1aQ\n\x0b\x46ieldsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.ValueR\x05value:\x02\x38\x01\"\xb2\x02\n\x05Value\x12;\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00R\tnullValue\x12#\n\x0cnumber_value\x18\x02 \x01(\x01H\x00R\x0bnumberValue\x12#\n\x0cstring_value\x18\x03 \x01(\tH\x00R\x0bstringValue\x12\x1f\n\nbool_value\x18\x04 \x01(\x08H\x00R\tboolValue\x12<\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00R\x0bstructValue\x12;\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00R\tlistValueB\x06\n\x04kind\";\n\tListValue\x12.\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.ValueR\x06values*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _globals['_STRUCT_FIELDSENTRY']._loaded_options = None
  _globals['_STRUCT_FIELDSENTRY']._serialized_options = b'8\001'
  _globals['_NULLVALUE']._serialized_start=574
  _globals['_NULLVALUE']._serialized_end=601
  _globals['_STRUCT']._serialized_start=50
  _globals['_STRUCT']._serialized_end=202
  _globals['_STRUCT_FIELDSENTRY']._serialized_start=121
  _globals['_STRUCT_FIELDSENTRY']._serialized_end=202
  _globals['_VALUE']._serialized_start=205
  _globals['_VALUE']._serialized_end=511
  _globals['_LISTVALUE']._serialized_start=513
  _globals['_LISTVALUE']._serialized_end=572
# @@protoc_insertion_point(module_scope)
parrot/lib/python3.10/site-packages/google/protobuf/symbol_database.py ADDED
@@ -0,0 +1,197 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ #
4
+ # Use of this source code is governed by a BSD-style
5
+ # license that can be found in the LICENSE file or at
6
+ # https://developers.google.com/open-source/licenses/bsd
7
+
8
+ """A database of Python protocol buffer generated symbols.
9
+
10
+ SymbolDatabase is the MessageFactory for messages generated at compile time,
11
+ and makes it easy to create new instances of a registered type, given only the
12
+ type's protocol buffer symbol name.
13
+
14
+ Example usage::
15
+
16
+ db = symbol_database.SymbolDatabase()
17
+
18
+ # Register symbols of interest, from one or multiple files.
19
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
20
+ db.RegisterMessage(my_proto_pb2.MyMessage)
21
+ db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
22
+
23
+ # The database can be used as a MessageFactory, to generate types based on
24
+ # their name:
25
+ types = db.GetMessages(['my_proto.proto'])
26
+ my_message_instance = types['MyMessage']()
27
+
28
+ # The database's underlying descriptor pool can be queried, so it's not
29
+ # necessary to know a type's filename to be able to generate it:
30
+ filename = db.pool.FindFileContainingSymbol('MyMessage')
31
+ my_message_instance = db.GetMessages([filename])['MyMessage']()
32
+
33
+ # This functionality is also provided directly via a convenience method:
34
+ my_message_instance = db.GetSymbol('MyMessage')()
35
+ """
36
+
37
+ import warnings
38
+
39
+ from google.protobuf.internal import api_implementation
40
+ from google.protobuf import descriptor_pool
41
+ from google.protobuf import message_factory
42
+
43
+
44
class SymbolDatabase():
  """A database of Python generated symbols.

  Maps message Descriptors to their generated Python classes on top of a
  DescriptorPool used for name lookups.
  """

  # Local cache of registered classes, keyed by message Descriptor.
  # NOTE(review): this is a *class-level* dict, so the registry is shared by
  # every SymbolDatabase instance (including Default()) — presumably
  # intentional upstream; confirm before relying on per-instance isolation.
  _classes = {}

  def __init__(self, pool=None):
    """Initializes a new SymbolDatabase.

    Args:
      pool: Optional DescriptorPool backing the database; a fresh private
        pool is created when omitted (any falsy value).
    """
    self.pool = pool or descriptor_pool.DescriptorPool()

  def GetPrototype(self, descriptor):
    """DEPRECATED: returns the message class for the given descriptor."""
    warnings.warn('SymbolDatabase.GetPrototype() is deprecated. Please '
                  'use message_factory.GetMessageClass() instead. '
                  'SymbolDatabase.GetPrototype() will be removed soon.')
    return message_factory.GetMessageClass(descriptor)

  def CreatePrototype(self, descriptor):
    """DEPRECATED: creates a new message class for the given descriptor."""
    warnings.warn('Directly call CreatePrototype() is wrong. Please use '
                  'message_factory.GetMessageClass() instead. '
                  'SymbolDatabase.CreatePrototype() will be removed soon.')
    return message_factory._InternalCreateMessageClass(descriptor)

  # NOTE(review): a deprecated duplicate ``GetMessages`` definition that was
  # unconditionally shadowed by the full implementation further below has been
  # removed; behavior is unchanged because only the later definition was ever
  # bound on the class.

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Calls to GetSymbol() and GetMessages() will return messages registered here.

    Args:
      message: A :class:`google.protobuf.message.Message` subclass (or
        instance); its descriptor will be registered.

    Returns:
      The provided message.
    """

    desc = message.DESCRIPTOR
    self._classes[desc] = message
    self.RegisterMessageDescriptor(desc)
    return message

  def RegisterMessageDescriptor(self, message_descriptor):
    """Registers the given message descriptor in the local database.

    Args:
      message_descriptor (Descriptor): the message descriptor to add.
    """
    # The C++ implementation registers descriptors itself; only the pure
    # Python pool needs the explicit add.
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddDescriptor(message_descriptor)

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor (EnumDescriptor): The enum descriptor to register.

    Returns:
      EnumDescriptor: The provided descriptor.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterServiceDescriptor(self, service_descriptor):
    """Registers the given service descriptor in the local database.

    Args:
      service_descriptor (ServiceDescriptor): the service descriptor to
        register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddServiceDescriptor(service_descriptor)

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor (FileDescriptor): The file descriptor to register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._InternalAddFileDescriptor(file_descriptor)

  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances; it may be
    extended in future to support other symbol types.

    Args:
      symbol (str): a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """

    return self._classes[self.pool.FindMessageTypeByName(symbol)]

  def GetMessages(self, files):
    # TODO: Fix the differences with MessageFactory.
    """Gets all registered messages from a specified file.

    Only messages already created and registered will be returned; (this is the
    case for imported _pb2 modules)
    But unlike MessageFactory, this version also returns already defined nested
    messages, but does not register any message extensions.

    Args:
      files (list[str]): The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes.

    Raises:
      KeyError: if a file could not be found.
    """

    def _GetAllMessages(desc):
      """Walk a message Descriptor and recursively yield it and all nested ones."""
      yield desc
      for msg_desc in desc.nested_types:
        for nested_desc in _GetAllMessages(msg_desc):
          yield nested_desc

    result = {}
    for file_name in files:
      file_desc = self.pool.FindFileByName(file_name)
      for msg_desc in file_desc.message_types_by_name.values():
        for desc in _GetAllMessages(msg_desc):
          try:
            result[desc.full_name] = self._classes[desc]
          except KeyError:
            # This descriptor has no registered class, skip it.
            pass
    return result
190
+
191
+
192
# Process-wide shared database, backed by the default (generated-code)
# descriptor pool; created eagerly at import time.
_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
parrot/lib/python3.10/site-packages/google/protobuf/testdata/__init__.py ADDED
File without changes
parrot/lib/python3.10/site-packages/google/protobuf/testdata/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (176 Bytes). View file