prasb committed on
Commit
e9bd6b7
·
verified ·
1 Parent(s): ab8fdef

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +2 -0
  2. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/__init__.cpython-38.pyc +0 -0
  3. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/_backends.cpython-38.pyc +0 -0
  4. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/_torch_specific.cpython-38.pyc +0 -0
  5. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/__init__.py +79 -0
  6. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/__pycache__/keras.cpython-38.pyc +0 -0
  7. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/_einmix.py +175 -0
  8. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/chainer.py +53 -0
  9. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/gluon.py +50 -0
  10. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/tensorflow.py +85 -0
  11. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/torch.py +62 -0
  12. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/__init__.py +15 -0
  13. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_client.py +351 -0
  14. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_client_async.py +263 -0
  15. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_credentials_async.py +112 -0
  16. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_service_account_async.py +132 -0
  17. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/challenges.py +183 -0
  18. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/reauth.py +350 -0
  19. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/service_account.py +687 -0
  20. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/sts.py +155 -0
  21. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/utils.py +171 -0
  22. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/descriptor.py +1224 -0
  23. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/descriptor_pool.py +1295 -0
  24. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/empty_pb2.py +26 -0
  25. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/field_mask_pb2.py +26 -0
  26. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/message_factory.py +185 -0
  27. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/service.py +228 -0
  28. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/service_reflection.py +295 -0
  29. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/struct_pb2.py +36 -0
  30. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/symbol_database.py +194 -0
  31. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/text_format.py +1795 -0
  32. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/__init__.cpython-38.pyc +0 -0
  33. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/extensions.cpython-38.pyc +0 -0
  34. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/plugins.cpython-38.pyc +0 -0
  35. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/fetching.cpython-38.pyc +0 -0
  36. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/findlib.cpython-38.pyc +0 -0
  37. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/format.cpython-38.pyc +0 -0
  38. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/imopen.cpython-38.pyc +0 -0
  39. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/util.cpython-38.pyc +0 -0
  40. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/v3_plugin_api.cpython-38.pyc +0 -0
  41. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/fetching.py +247 -0
  42. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/util.py +559 -0
  43. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/bsdf.cpython-38.pyc +0 -0
  44. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/dicom.cpython-38.pyc +0 -0
  45. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/example.cpython-38.pyc +0 -0
  46. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/ffmpeg.cpython-38.pyc +0 -0
  47. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/freeimagemulti.cpython-38.pyc +0 -0
  48. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/grab.cpython-38.pyc +0 -0
  49. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/pyav.cpython-38.pyc +0 -0
  50. my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/spe.cpython-38.pyc +0 -0
.gitattributes CHANGED
@@ -379,3 +379,5 @@ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/
379
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralBolIta.ttf filter=lfs diff=lfs merge=lfs -text
380
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralItalic.ttf filter=lfs diff=lfs merge=lfs -text
381
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Bold.ttf filter=lfs diff=lfs merge=lfs -text
 
 
 
379
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralBolIta.ttf filter=lfs diff=lfs merge=lfs -text
380
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/STIXGeneralItalic.ttf filter=lfs diff=lfs merge=lfs -text
381
  my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSerif-Bold.ttf filter=lfs diff=lfs merge=lfs -text
382
+ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/__pycache__/widgets.cpython-38.pyc filter=lfs diff=lfs merge=lfs -text
383
+ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/mpl-data/fonts/ttf/DejaVuSansMono-BoldOblique.ttf filter=lfs diff=lfs merge=lfs -text
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (609 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/_backends.cpython-38.pyc ADDED
Binary file (24.7 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/__pycache__/_torch_specific.cpython-38.pyc ADDED
Binary file (3.24 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/__init__.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __author__ = 'Alex Rogozhnikov'
2
+
3
+ import functools
4
+
5
+ from einops.einops import _apply_recipe
6
+
7
+ from ..einops import TransformRecipe, _prepare_transformation_recipe
8
+ from .. import EinopsError
9
+
10
+
11
class RearrangeMixin:
    """
    Rearrange layer behaves identically to einops.rearrange operation.

    :param pattern: str, rearrangement pattern
    :param axes_lengths: any additional specification of dimensions

    See einops.rearrange for examples.
    """

    def __init__(self, pattern, **axes_lengths):
        super().__init__()
        self.pattern = pattern
        self.axes_lengths = axes_lengths
        self._recipe = self.recipe()  # checking parameters

    def __repr__(self):
        params = repr(self.pattern)
        for axis, length in self.axes_lengths.items():
            params += ', {}={}'.format(axis, length)
        return '{}({})'.format(self.__class__.__name__, params)

    def recipe(self) -> TransformRecipe:
        """Prepare (and cache on this instance) the transformation recipe for the layer."""
        # NOTE: this was previously decorated with functools.lru_cache, which keys on
        # `self` and therefore keeps up to `maxsize` layer instances alive for the
        # lifetime of the cache (flake8-bugbear B019). Per-instance caching preserves
        # the "compute once per layer" behavior without the leak.
        cached = getattr(self, '_cached_recipe', None)
        if cached is not None:
            return cached
        try:
            # axes_lengths must be hashable for the downstream (cached) preparation call
            hashable_lengths = tuple(sorted(self.axes_lengths.items()))
            cached = _prepare_transformation_recipe(self.pattern, operation='rearrange', axes_lengths=hashable_lengths)
        except EinopsError as e:
            # chain the original error so the root cause stays visible in tracebacks
            raise EinopsError(' Error while preparing {!r}\n {}'.format(self, e)) from e
        self._cached_recipe = cached
        return cached

    def _apply_recipe(self, x):
        # delegates to the shared einops implementation; recipe was validated in __init__
        return _apply_recipe(self._recipe, x, reduction_type='rearrange')
43
+
44
+
45
class ReduceMixin:
    """
    Reduce layer behaves identically to einops.reduce operation.

    :param pattern: str, rearrangement pattern
    :param reduction: one of available reductions ('min', 'max', 'sum', 'mean', 'prod'), case-sensitive
    :param axes_lengths: any additional specification of dimensions

    See einops.reduce for examples.
    """

    def __init__(self, pattern, reduction, **axes_lengths):
        super().__init__()
        self.pattern = pattern
        self.reduction = reduction
        self.axes_lengths = axes_lengths
        self._recipe = self.recipe()  # checking parameters

    def __repr__(self):
        params = '{!r}, {!r}'.format(self.pattern, self.reduction)
        for axis, length in self.axes_lengths.items():
            params += ', {}={}'.format(axis, length)
        return '{}({})'.format(self.__class__.__name__, params)

    def recipe(self) -> TransformRecipe:
        """Prepare (and cache on this instance) the transformation recipe for the layer."""
        # NOTE: this was previously decorated with functools.lru_cache, which keys on
        # `self` and therefore keeps up to `maxsize` layer instances alive for the
        # lifetime of the cache (flake8-bugbear B019). Per-instance caching preserves
        # the "compute once per layer" behavior without the leak.
        cached = getattr(self, '_cached_recipe', None)
        if cached is not None:
            return cached
        try:
            # axes_lengths must be hashable for the downstream (cached) preparation call
            hashable_lengths = tuple(sorted(self.axes_lengths.items()))
            cached = _prepare_transformation_recipe(
                self.pattern, operation=self.reduction, axes_lengths=hashable_lengths)
        except EinopsError as e:
            # chain the original error so the root cause stays visible in tracebacks
            raise EinopsError(' Error while preparing {!r}\n {}'.format(self, e)) from e
        self._cached_recipe = cached
        return cached

    def _apply_recipe(self, x):
        # delegates to the shared einops implementation; recipe was validated in __init__
        return _apply_recipe(self._recipe, x, reduction_type=self.reduction)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/__pycache__/keras.cpython-38.pyc ADDED
Binary file (351 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/_einmix.py ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional, Dict
2
+
3
+ from einops import EinopsError
4
+ from einops.parsing import ParsedExpression
5
+ import warnings
6
+ import string
7
+ from ..einops import _product
8
+
9
+
10
def _report_axes(axes: set, report_message: str):
    """Raise EinopsError with `report_message` formatted with `axes` when `axes` is non-empty."""
    if axes:
        raise EinopsError(report_message.format(axes))
13
+
14
+
15
class _EinmixMixin:
    def __init__(self, pattern, weight_shape, bias_shape=None, **axes_lengths):
        """
        EinMix - Einstein summation with automated tensor management and axis packing/unpacking.

        EinMix is an advanced tool, helpful tutorial:
        https://github.com/arogozhnikov/einops/blob/master/docs/3-einmix-layer.ipynb

        Imagine taking einsum with two arguments, one of each input, and one - tensor with weights
        >>> einsum('time batch channel_in, channel_in channel_out -> time batch channel_out', input, weight)

        This layer manages weights for you, syntax highlights separate role of weight matrix
        >>> EinMix('time batch channel_in -> time batch channel_out', weight_shape='channel_in channel_out')
        But otherwise it is the same einsum under the hood.

        Simple linear layer with bias term (you have one like that in your framework)
        >>> EinMix('t b cin -> t b cout', weight_shape='cin cout', bias_shape='cout', cin=10, cout=20)
        There is no restriction to mix the last axis. Let's mix along height
        >>> EinMix('h w c-> hout w c', weight_shape='h hout', bias_shape='hout', h=32, hout=32)
        Channel-wise multiplication (like one used in normalizations)
        >>> EinMix('t b c -> t b c', weight_shape='c', c=128)
        Separate dense layer within each head, no connection between different heads
        >>> EinMix('t b (head cin) -> t b (head cout)', weight_shape='head cin cout', ...)

        ... ah yes, you need to specify all dimensions of weight shape/bias shape in parameters.

        Use cases:
        - when channel dimension is not last, use EinMix, not transposition
        - patch/segment embeddings
        - when need only within-group connections to reduce number of weights and computations
        - perfect as a part of sequential models
        - next-gen MLPs (follow tutorial to learn more)

        Uniform He initialization is applied to weight tensor and accounts for the number of elements mixed.

        Parameters
        :param pattern: transformation pattern, left side - dimensions of input, right side - dimensions of output
        :param weight_shape: axes of weight. Tensor of this shape is created, stored, and optimized in a layer
        :param bias_shape: axes of bias added to output.
        :param axes_lengths: dimensions of weight tensor
        """
        super().__init__()
        self.pattern = pattern
        self.weight_shape = weight_shape
        self.bias_shape = bias_shape
        self.axes_lengths = axes_lengths

        # parse the three expressions: input side, output side, and the weight axes
        left_pattern, right_pattern = pattern.split('->')
        left = ParsedExpression(left_pattern)
        right = ParsedExpression(right_pattern)
        weight = ParsedExpression(weight_shape)
        # every output axis must originate either from the input or from the weight
        _report_axes(
            set.difference(right.identifiers, {*left.identifiers, *weight.identifiers}),
            'Unrecognized identifiers on the right side of EinMix {}'
        )

        if left.has_ellipsis or right.has_ellipsis or weight.has_ellipsis:
            raise EinopsError('Ellipsis is not supported in EinMix (right now)')
        if any(x.has_non_unitary_anonymous_axes for x in [left, right, weight]):
            raise EinopsError('Anonymous axes (numbers) are not allowed in EinMix')
        if '(' in weight_shape or ')' in weight_shape:
            raise EinopsError(f'Parenthesis is not allowed in weight shape: {weight_shape}')

        # if input/output group axes with parentheses, einsum alone can't handle it:
        # wrap the einsum with rearrange layers that ungroup (pre) / regroup (post)
        pre_reshape_pattern = None
        pre_reshape_lengths = None
        post_reshape_pattern = None
        if any(len(group) != 1 for group in left.composition):
            names = []
            for group in left.composition:
                names += group
            composition = ' '.join(names)
            pre_reshape_pattern = f'{left_pattern}->{composition}'
            pre_reshape_lengths = {name: length for name, length in self.axes_lengths.items() if name in names}

        if any(len(group) != 1 for group in right.composition):
            names = []
            for group in right.composition:
                names += group
            composition = ' '.join(names)
            post_reshape_pattern = f'{composition}->{right_pattern}'

        # framework-specific hook (torch/tf/chainer/gluon implement this)
        self._create_rearrange_layers(pre_reshape_pattern, pre_reshape_lengths, post_reshape_pattern, {})

        # all weight axes need explicit sizes; no extra sizes may be passed
        for axis in weight.identifiers:
            if axis not in axes_lengths:
                raise EinopsError('Dimension {} of weight should be specified'.format(axis))
        _report_axes(
            set.difference(set(axes_lengths), {*left.identifiers, *weight.identifiers}),
            'Axes {} are not used in pattern',
        )
        _report_axes(
            set.difference(weight.identifiers, {*left.identifiers, *right.identifiers}),
            'Weight axes {} are redundant'
        )
        if len(weight.identifiers) == 0:
            warnings.warn('EinMix: weight has no dimensions (means multiplication by a number)')

        # `axis,` unpacking relies on weight groups being single-axis (parentheses were rejected above)
        _weight_shape = [axes_lengths[axis] for axis, in weight.composition]
        # single output element is a combination of fan_in input elements
        _fan_in = _product([axes_lengths[axis] for axis, in weight.composition if axis not in right.identifiers])
        if bias_shape is not None:
            if not isinstance(bias_shape, str):
                raise EinopsError('bias shape should be string specifying which axes bias depends on')
            bias = ParsedExpression(bias_shape)
            _report_axes(
                set.difference(bias.identifiers, right.identifiers),
                'Bias axes {} not present in output'
            )
            _report_axes(
                set.difference(bias.identifiers, set(axes_lengths)),
                'Sizes not provided for bias axes {}',
            )

            # bias is shaped like the output, with size-1 dims on axes it does not cover
            # (so it broadcasts over them)
            _bias_shape = []
            for axes in right.composition:
                for axis in axes:
                    if axis in bias.identifiers:
                        _bias_shape.append(axes_lengths[axis])
                    else:
                        _bias_shape.append(1)
        else:
            _bias_shape = None
            _bias_input_size = None  # NOTE(review): unused in the visible code

        # uniform He-style bounds derived from fan-in
        weight_bound = (3 / _fan_in) ** 0.5
        bias_bound = (1 / _fan_in) ** 0.5
        self._create_parameters(_weight_shape, weight_bound, _bias_shape, bias_bound)

        # rewrite einsum expression with single-letter latin identifiers so that
        # expression will be understood by any framework
        mapping2letters = {*left.identifiers, *right.identifiers, *weight.identifiers}
        mapping2letters = {k: letter for letter, k in zip(string.ascii_lowercase, mapping2letters)}

        def write_flat(axes: list):
            # render a flat axis list as its single-letter einsum spelling
            return ''.join(mapping2letters[axis] for axis in axes)

        self.einsum_pattern: str = '{},{}->{}'.format(
            write_flat(left.flat_axes_order()),
            write_flat(weight.flat_axes_order()),
            write_flat(right.flat_axes_order()),
        )

    def _create_rearrange_layers(self,
                                 pre_reshape_pattern: Optional[str],
                                 pre_reshape_lengths: Optional[Dict],
                                 post_reshape_pattern: Optional[str],
                                 post_reshape_lengths: Optional[Dict]):
        # hook: build the optional ungroup/regroup layers around the einsum
        raise NotImplementedError('Should be defined in framework implementations')

    def _create_parameters(self, weight_shape, weight_bound, bias_shape, bias_bound):
        """ Shape and implementations """
        # hook: allocate weight (and optional bias) tensors in the host framework
        raise NotImplementedError('Should be defined in framework implementations')

    def __repr__(self):
        params = repr(self.pattern)
        params += f", '{self.weight_shape}'"
        if self.bias_shape is not None:
            params += f", '{self.bias_shape}'"
        for axis, length in self.axes_lengths.items():
            params += ', {}={}'.format(axis, length)
        return '{}({})'.format(self.__class__.__name__, params)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/chainer.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional, Dict
2
+
3
+ import chainer
4
+
5
+ from . import RearrangeMixin, ReduceMixin
6
+ from ._einmix import _EinmixMixin
7
+
8
+ __author__ = 'Alex Rogozhnikov'
9
+
10
+
11
class Rearrange(RearrangeMixin, chainer.Link):
    """Chainer counterpart of einops.rearrange; see RearrangeMixin for parameters."""

    def __call__(self, x):
        return self._apply_recipe(x)
14
+
15
+
16
class Reduce(ReduceMixin, chainer.Link):
    """Chainer counterpart of einops.reduce; see ReduceMixin for parameters."""

    def __call__(self, x):
        return self._apply_recipe(x)
19
+
20
+
21
class EinMix(_EinmixMixin, chainer.Link):
    """Chainer implementation of EinMix; see _EinmixMixin for the full contract."""

    def _create_parameters(self, weight_shape, weight_bound, bias_shape, bias_bound):
        # uniform(-bound, +bound) initialization; bounds were computed from fan-in by the mixin
        uniform = chainer.variable.initializers.Uniform
        with self.init_scope():
            self.weight = chainer.variable.Parameter(uniform(weight_bound), weight_shape)
            if bias_shape is None:
                self.bias = None
            else:
                self.bias = chainer.variable.Parameter(uniform(bias_bound), bias_shape)

    def _create_rearrange_layers(self,
                                 pre_reshape_pattern: Optional[str],
                                 pre_reshape_lengths: Optional[Dict],
                                 post_reshape_pattern: Optional[str],
                                 post_reshape_lengths: Optional[Dict],
                                 ):
        # optional ungroup (before einsum) / regroup (after einsum) layers
        self.pre_rearrange = None if pre_reshape_pattern is None \
            else Rearrange(pre_reshape_pattern, **pre_reshape_lengths)
        self.post_rearrange = None if post_reshape_pattern is None \
            else Rearrange(post_reshape_pattern)

    def __call__(self, input):
        if self.pre_rearrange is not None:
            input = self.pre_rearrange(input)
        out = chainer.functions.einsum(self.einsum_pattern, input, self.weight)
        if self.bias is not None:
            out = out + self.bias
        if self.post_rearrange is not None:
            out = self.post_rearrange(out)
        return out
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/gluon.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional, Dict
2
+
3
+ import mxnet
4
+
5
+ from . import RearrangeMixin, ReduceMixin
6
+ from ._einmix import _EinmixMixin
7
+
8
+ __author__ = 'Alex Rogozhnikov'
9
+
10
+
11
class Rearrange(RearrangeMixin, mxnet.gluon.HybridBlock):
    """Gluon counterpart of einops.rearrange; see RearrangeMixin for parameters."""

    def hybrid_forward(self, F, x):
        return self._apply_recipe(x)
14
+
15
+
16
class Reduce(ReduceMixin, mxnet.gluon.HybridBlock):
    """Gluon counterpart of einops.reduce; see ReduceMixin for parameters."""

    def hybrid_forward(self, F, x):
        return self._apply_recipe(x)
19
+
20
+
21
class EinMix(_EinmixMixin, mxnet.gluon.HybridBlock):
    """Gluon implementation of EinMix; see _EinmixMixin for the full contract."""

    def _create_parameters(self, weight_shape, weight_bound, bias_shape, bias_bound):
        # uniform(-bound, +bound) initialization; bounds were computed from fan-in by the mixin
        with self.name_scope():
            self.weight = self.params.get(name='weight', shape=weight_shape,
                                          init=mxnet.initializer.Uniform(weight_bound),
                                          )
            if bias_shape is None:
                self.bias = None
            else:
                self.bias = self.params.get(name='bias', shape=bias_shape,
                                            init=mxnet.initializer.Uniform(bias_bound),
                                            )

    def _create_rearrange_layers(self,
                                 pre_reshape_pattern: Optional[str],
                                 pre_reshape_lengths: Optional[Dict],
                                 post_reshape_pattern: Optional[str],
                                 post_reshape_lengths: Optional[Dict]):
        if (pre_reshape_pattern is not None) or (post_reshape_pattern is not None):
            raise NotImplementedError("EinMix in mxnet/gluon doesn't support axis group/ungroup "
                                      "because einsum in gluon defined only for mx.np.ndarrays")

    def hybrid_forward(self, F, x, *args, **kwargs):
        # gluon's einsum is defined only for mx.np arrays, so convert the input with
        # .as_np_ndarray(); .data() is the standard way to read parameter values in gluon
        out = mxnet.np.einsum(self.einsum_pattern, x.as_np_ndarray(), self.weight.data())
        if self.bias is not None:
            out += self.bias.data()
        return out
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/tensorflow.py ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional, Dict
2
+
3
+ import tensorflow as tf
4
+ from tensorflow.keras.layers import Layer
5
+
6
+ from .._backends import UnknownSize
7
+ from . import RearrangeMixin, ReduceMixin
8
+ from ._einmix import _EinmixMixin
9
+ from ..einops import TransformRecipe, _reconstruct_from_shape_uncached
10
+
11
+ __author__ = 'Alex Rogozhnikov'
12
+
13
+
14
def _compute_output_shape(recipe: TransformRecipe, input_shape) -> List[Optional[int]]:
    """Map an input shape (with None for unknown dims) through `recipe`, returning the output shape."""
    # unknown dimensions are modeled symbolically so shape arithmetic can proceed
    symbolic_shape = [UnknownSize() if dim is None else int(dim) for dim in input_shape]
    _init_shapes, _reduced_axes, _axes_reordering, _added_axes, final_shape = \
        _reconstruct_from_shape_uncached(recipe, symbolic_shape)
    # translate symbolic unknowns back to None for Keras
    return [None if isinstance(dim, UnknownSize) else int(dim) for dim in final_shape]
20
+
21
+
22
class Rearrange(RearrangeMixin, Layer):
    """Keras layer for einops.rearrange; see RearrangeMixin for parameters."""

    def compute_output_shape(self, input_shape):
        return _compute_output_shape(self.recipe(), input_shape)

    def call(self, inputs):
        return self._apply_recipe(inputs)

    def get_config(self):
        # everything needed to reconstruct the layer via __init__
        config = {'pattern': self.pattern}
        config.update(self.axes_lengths)
        return config
31
+
32
+
33
class Reduce(ReduceMixin, Layer):
    """Keras layer for einops.reduce; see ReduceMixin for parameters."""

    def compute_output_shape(self, input_shape):
        return _compute_output_shape(self.recipe(), input_shape)

    def call(self, inputs):
        return self._apply_recipe(inputs)

    def get_config(self):
        # everything needed to reconstruct the layer via __init__
        config = {'pattern': self.pattern, 'reduction': self.reduction}
        config.update(self.axes_lengths)
        return config
42
+
43
+
44
class EinMix(_EinmixMixin, Layer):
    """Keras/TF implementation of EinMix; see _EinmixMixin for the full contract."""

    def _create_parameters(self, weight_shape, weight_bound, bias_shape, bias_bound):
        # uniform(-bound, +bound) initialization; bounds were computed from fan-in by the mixin
        make_init = tf.random_uniform_initializer
        self.weight = tf.Variable(make_init(-weight_bound, weight_bound)(shape=weight_shape),
                                  trainable=True)
        if bias_shape is None:
            self.bias = None
        else:
            self.bias = tf.Variable(make_init(-bias_bound, bias_bound)(shape=bias_shape),
                                    trainable=True)

    def _create_rearrange_layers(self,
                                 pre_reshape_pattern: Optional[str],
                                 pre_reshape_lengths: Optional[Dict],
                                 post_reshape_pattern: Optional[str],
                                 post_reshape_lengths: Optional[Dict],
                                 ):
        # optional ungroup (before einsum) / regroup (after einsum) layers
        self.pre_rearrange = None if pre_reshape_pattern is None \
            else Rearrange(pre_reshape_pattern, **pre_reshape_lengths)
        self.post_rearrange = None if post_reshape_pattern is None \
            else Rearrange(post_reshape_pattern, **post_reshape_lengths)

    def build(self, input_shape):
        # parameters are created eagerly in __init__; nothing to defer to build time
        pass

    def call(self, inputs):
        if self.pre_rearrange is not None:
            inputs = self.pre_rearrange(inputs)
        out = tf.einsum(self.einsum_pattern, inputs, self.weight)
        if self.bias is not None:
            out = out + self.bias
        if self.post_rearrange is not None:
            out = self.post_rearrange(out)
        return out

    def get_config(self):
        # everything needed to reconstruct the layer via __init__
        config = {'pattern': self.pattern,
                  'weight_shape': self.weight_shape,
                  'bias_shape': self.bias_shape}
        config.update(self.axes_lengths)
        return config
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/einops/layers/torch.py ADDED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Optional, Dict
2
+
3
+ import torch
4
+
5
+ from . import RearrangeMixin, ReduceMixin
6
+ from ._einmix import _EinmixMixin
7
+ from .._torch_specific import apply_for_scriptable_torch
8
+
9
+ __author__ = 'Alex Rogozhnikov'
10
+
11
+
12
class Rearrange(RearrangeMixin, torch.nn.Module):
    """torch.nn.Module for einops.rearrange; scriptable via apply_for_scriptable_torch."""

    def forward(self, input):
        return apply_for_scriptable_torch(self._recipe, input, reduction_type='rearrange')

    def _apply_recipe(self, x):
        # intentionally a no-op override: keeps the mixin's implementation
        # out of TorchScript's view when this module is scripted
        pass
19
+
20
+
21
class Reduce(ReduceMixin, torch.nn.Module):
    """torch.nn.Module for einops.reduce; scriptable via apply_for_scriptable_torch."""

    def forward(self, input):
        return apply_for_scriptable_torch(self._recipe, input, reduction_type=self.reduction)

    def _apply_recipe(self, x):
        # intentionally a no-op override: keeps the mixin's implementation
        # out of TorchScript's view when this module is scripted
        pass
28
+
29
+
30
class EinMix(_EinmixMixin, torch.nn.Module):
    """PyTorch implementation of EinMix; see _EinmixMixin for the full contract."""

    def _create_parameters(self, weight_shape, weight_bound, bias_shape, bias_bound):
        # uniform(-bound, +bound) initialization; bounds were computed from fan-in by the mixin
        weight_init = torch.zeros(weight_shape).uniform_(-weight_bound, weight_bound)
        self.weight = torch.nn.Parameter(weight_init, requires_grad=True)
        if bias_shape is None:
            self.bias = None
        else:
            bias_init = torch.zeros(bias_shape).uniform_(-bias_bound, bias_bound)
            self.bias = torch.nn.Parameter(bias_init, requires_grad=True)

    def _create_rearrange_layers(self,
                                 pre_reshape_pattern: Optional[str],
                                 pre_reshape_lengths: Optional[Dict],
                                 post_reshape_pattern: Optional[str],
                                 post_reshape_lengths: Optional[Dict],
                                 ):
        # optional ungroup (before einsum) / regroup (after einsum) layers
        self.pre_rearrange = None if pre_reshape_pattern is None \
            else Rearrange(pre_reshape_pattern, **pre_reshape_lengths)
        self.post_rearrange = None if post_reshape_pattern is None \
            else Rearrange(post_reshape_pattern, **post_reshape_lengths)

    def forward(self, input):
        if self.pre_rearrange is not None:
            input = self.pre_rearrange(input)
        result = torch.einsum(self.einsum_pattern, input, self.weight)
        if self.bias is not None:
            result += self.bias
        if self.post_rearrange is not None:
            result = self.post_rearrange(result)
        return result
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/__init__.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Google OAuth 2.0 Library for Python."""
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_client.py ADDED
@@ -0,0 +1,351 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """OAuth 2.0 client.
16
+
17
+ This is a client for interacting with an OAuth 2.0 authorization server's
18
+ token endpoint.
19
+
20
+ For more information about the token endpoint, see
21
+ `Section 3.1 of rfc6749`_
22
+
23
+ .. _Section 3.1 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
24
+ """
25
+
26
+ import datetime
27
+ import json
28
+
29
+ import six
30
+ from six.moves import http_client
31
+ from six.moves import urllib
32
+
33
+ from google.auth import _helpers
34
+ from google.auth import exceptions
35
+ from google.auth import jwt
36
+
37
# Content types for token endpoint request bodies.
_URLENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
_JSON_CONTENT_TYPE = "application/json"
# Grant type for the JWT-bearer authorization grant profile (rfc7523).
_JWT_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
# Grant type for the refresh-token grant (rfc6749 section 6).
_REFRESH_GRANT_TYPE = "refresh_token"
41
+
42
+
43
def _handle_error_response(response_data):
    """Translates an error response into an exception.

    Args:
        response_data (Mapping | str): The decoded response data.

    Raises:
        google.auth.exceptions.RefreshError: The errors contained in response_data.
    """
    # A plain string body carries no structured error fields to extract.
    if isinstance(response_data, six.string_types):
        raise exceptions.RefreshError(response_data)

    try:
        details = "{}: {}".format(
            response_data["error"], response_data.get("error_description")
        )
    except (KeyError, ValueError):
        # No standard error fields present; fall back to the raw payload.
        details = json.dumps(response_data)

    raise exceptions.RefreshError(details, response_data)
63
+
64
+
65
def _parse_expiry(response_data):
    """Parses the expiry field from a response into a datetime.

    Args:
        response_data (Mapping): The JSON-parsed response data.

    Returns:
        Optional[datetime]: The expiration or ``None`` if no expiration was
        specified.
    """
    lifetime = response_data.get("expires_in")
    if lifetime is None:
        return None
    # expires_in is a lifetime in seconds relative to "now".
    return _helpers.utcnow() + datetime.timedelta(seconds=lifetime)
81
+
82
+
83
def _token_endpoint_request_no_throw(
    request, token_uri, body, access_token=None, use_json=False, **kwargs
):
    """Makes a request to the OAuth 2.0 authorization server's token endpoint.
    This function doesn't throw on response errors.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        body (Mapping[str, str]): The parameters to send in the request body.
        access_token (Optional(str)): The access token needed to make the request.
        use_json (Optional(bool)): Use urlencoded format or json format for the
            content type. The default value is False.
        kwargs: Additional arguments passed on to the request method. The
            kwargs will be passed to `requests.request` method, see:
            https://docs.python-requests.org/en/latest/api/#requests.request.
            For example, you can use `cert=("cert_pem_path", "key_pem_path")`
            to set up client side SSL certificate, and use
            `verify="ca_bundle_path"` to set up the CA certificates for sever
            side SSL certificate verification.

    Returns:
        Tuple(bool, Mapping[str, str]): A boolean indicating if the request is
            successful, and a mapping for the JSON-decoded response data.
    """
    # Serialize the body according to the requested content type.
    if use_json:
        headers = {"Content-Type": _JSON_CONTENT_TYPE}
        body = json.dumps(body).encode("utf-8")
    else:
        headers = {"Content-Type": _URLENCODED_CONTENT_TYPE}
        body = urllib.parse.urlencode(body).encode("utf-8")

    if access_token:
        headers["Authorization"] = "Bearer {}".format(access_token)

    retry = 0
    # retry to fetch token for maximum of two times if any internal failure
    # occurs.
    while True:
        response = request(
            method="POST", url=token_uri, headers=headers, body=body, **kwargs
        )
        # Transports may hand back bytes or str; normalize to str.
        response_body = (
            response.data.decode("utf-8")
            if hasattr(response.data, "decode")
            else response.data
        )

        if response.status == http_client.OK:
            # response_body should be a JSON
            response_data = json.loads(response_body)
            break
        else:
            # For a failed response, response_body could be a string
            try:
                response_data = json.loads(response_body)
                error_desc = response_data.get("error_description") or ""
                error_code = response_data.get("error") or ""
                # "internal_failure" is treated as transient: retry at most once.
                if (
                    any(e == "internal_failure" for e in (error_code, error_desc))
                    and retry < 1
                ):
                    retry += 1
                    continue
            except ValueError:
                # Non-JSON error body: return the raw string to the caller.
                response_data = response_body
            return False, response_data

    return True, response_data
154
+
155
+
156
def _token_endpoint_request(
    request, token_uri, body, access_token=None, use_json=False, **kwargs
):
    """Makes a request to the OAuth 2.0 authorization server's token endpoint.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        body (Mapping[str, str]): The parameters to send in the request body.
        access_token (Optional(str)): The access token needed to make the request.
        use_json (Optional(bool)): Use urlencoded format or json format for the
            content type. The default value is False.
        kwargs: Additional arguments passed on to the request method. The
            kwargs will be passed to `requests.request` method, see:
            https://docs.python-requests.org/en/latest/api/#requests.request.
            For example, you can use `cert=("cert_pem_path", "key_pem_path")`
            to set up client side SSL certificate, and use
            `verify="ca_bundle_path"` to set up the CA certificates for sever
            side SSL certificate verification.

    Returns:
        Mapping[str, str]: The JSON-decoded response data.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    # Delegate to the non-throwing variant and convert failures to exceptions.
    ok, data = _token_endpoint_request_no_throw(
        request, token_uri, body, access_token=access_token, use_json=use_json, **kwargs
    )
    if not ok:
        _handle_error_response(data)
    return data
191
+
192
+
193
def jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants.

    For more details, see `rfc7523 section 4`_.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        assertion (str): The OAuth 2.0 assertion.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
            expiration, and additional data returned by the token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.

    .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
    """
    response_data = _token_endpoint_request(
        request, token_uri, {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
    )

    try:
        token = response_data["access_token"]
    except KeyError as caught_exc:
        # Preserve the KeyError as the cause for easier debugging.
        six.raise_from(
            exceptions.RefreshError("No access token in response.", response_data),
            caught_exc,
        )

    return token, _parse_expiry(response_data), response_data
228
+
229
+
230
def id_token_jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
    requests an OpenID Connect ID Token instead of an access token.

    This is a variant on the standard JWT Profile that is currently unique
    to Google. This was added for the benefit of authenticating to services
    that require ID Tokens instead of access tokens or JWT bearer tokens.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorization server's token endpoint
            URI.
        assertion (str): JWT token signed by a service account. The token's
            payload must include a ``target_audience`` claim.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]:
            The (encoded) Open ID Connect ID Token, expiration, and additional
            data returned by the endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    response_data = _token_endpoint_request(
        request, token_uri, {"assertion": assertion, "grant_type": _JWT_GRANT_TYPE}
    )

    try:
        token = response_data["id_token"]
    except KeyError as caught_exc:
        six.raise_from(
            exceptions.RefreshError("No ID token in response.", response_data),
            caught_exc,
        )

    # ID tokens carry their own expiration in the 'exp' claim; the token is
    # decoded without verification here solely to read that claim.
    claims = jwt.decode(token, verify=False)
    expiry = datetime.datetime.utcfromtimestamp(claims["exp"])

    return token, expiry, response_data
269
+
270
+
271
def _handle_refresh_grant_response(response_data, refresh_token):
    """Extract tokens from refresh grant response.

    Args:
        response_data (Mapping[str, str]): Refresh grant response data.
        refresh_token (str): Current refresh token.

    Returns:
        Tuple[str, str, Optional[datetime], Mapping[str, str]]: The access token,
            refresh token, expiration, and additional data returned by the token
            endpoint. If response_data doesn't have refresh token, then the current
            refresh token will be returned.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    try:
        access_token = response_data["access_token"]
    except KeyError as caught_exc:
        six.raise_from(
            exceptions.RefreshError("No access token in response.", response_data),
            caught_exc,
        )

    # The server may rotate the refresh token; otherwise keep the current one.
    new_refresh_token = response_data.get("refresh_token", refresh_token)

    return access_token, new_refresh_token, _parse_expiry(response_data), response_data
298
+
299
+
300
def refresh_grant(
    request,
    token_uri,
    refresh_token,
    client_id,
    client_secret,
    scopes=None,
    rapt_token=None,
):
    """Implements the OAuth 2.0 refresh token grant.

    For more details, see `rfc6749 section 6`_.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        refresh_token (str): The refresh token to use to get a new access
            token.
        client_id (str): The OAuth 2.0 application's client ID.
        client_secret (str): The Oauth 2.0 appliaction's client secret.
        scopes (Optional(Sequence[str])): Scopes to request. If present, all
            scopes must be authorized for the refresh token. Useful if refresh
            token has a wild card scope (e.g.
            'https://www.googleapis.com/auth/any-api').
        rapt_token (Optional(str)): The reauth Proof Token.

    Returns:
        Tuple[str, str, Optional[datetime], Mapping[str, str]]: The access
            token, new or current refresh token, expiration, and additional data
            returned by the token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.

    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
    """
    body = {
        "grant_type": _REFRESH_GRANT_TYPE,
        "client_id": client_id,
        "client_secret": client_secret,
        "refresh_token": refresh_token,
    }
    # Optional fields are only sent when supplied.
    if scopes:
        body["scope"] = " ".join(scopes)
    if rapt_token:
        body["rapt"] = rapt_token

    response_data = _token_endpoint_request(request, token_uri, body)
    return _handle_refresh_grant_response(response_data, refresh_token)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_client_async.py ADDED
@@ -0,0 +1,263 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """OAuth 2.0 async client.
16
+
17
+ This is a client for interacting with an OAuth 2.0 authorization server's
18
+ token endpoint.
19
+
20
+ For more information about the token endpoint, see
21
+ `Section 3.1 of rfc6749`_
22
+
23
+ .. _Section 3.1 of rfc6749: https://tools.ietf.org/html/rfc6749#section-3.2
24
+ """
25
+
26
+ import datetime
27
+ import json
28
+
29
+ import six
30
+ from six.moves import http_client
31
+ from six.moves import urllib
32
+
33
+ from google.auth import exceptions
34
+ from google.auth import jwt
35
+ from google.oauth2 import _client as client
36
+
37
+
38
async def _token_endpoint_request_no_throw(
    request, token_uri, body, access_token=None, use_json=False
):
    """Makes a request to the OAuth 2.0 authorization server's token endpoint.
    This function doesn't throw on response errors.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        body (Mapping[str, str]): The parameters to send in the request body.
        access_token (Optional(str)): The access token needed to make the request.
        use_json (Optional(bool)): Use urlencoded format or json format for the
            content type. The default value is False.

    Returns:
        Tuple(bool, Mapping[str, str]): A boolean indicating if the request is
            successful, and a mapping for the JSON-decoded response data.
    """
    # Serialize the body according to the requested content type.
    if use_json:
        headers = {"Content-Type": client._JSON_CONTENT_TYPE}
        body = json.dumps(body).encode("utf-8")
    else:
        headers = {"Content-Type": client._URLENCODED_CONTENT_TYPE}
        body = urllib.parse.urlencode(body).encode("utf-8")

    if access_token:
        headers["Authorization"] = "Bearer {}".format(access_token)

    retry = 0
    # retry to fetch token for maximum of two times if any internal failure
    # occurs.
    while True:
        response = await request(
            method="POST", url=token_uri, headers=headers, body=body
        )

        # Using data.read() resulted in zlib decompression errors. This may require future investigation.
        raw_body = await response.content()

        response_body = (
            raw_body.decode("utf-8") if hasattr(raw_body, "decode") else raw_body
        )

        if response.status == http_client.OK:
            # Successful responses are always JSON.
            return True, json.loads(response_body)

        # For a failed response, the body may not be JSON (e.g. an HTML error
        # page); mirror the synchronous client and hand back the raw string
        # instead of raising from json.loads.
        try:
            response_data = json.loads(response_body)
        except ValueError:
            return False, response_body

        error_desc = response_data.get("error_description") or ""
        error_code = response_data.get("error") or ""
        # "internal_failure" is treated as transient: retry at most once.
        if (
            any(e == "internal_failure" for e in (error_code, error_desc))
            and retry < 1
        ):
            retry += 1
            continue
        return False, response_data
102
+
103
+
104
async def _token_endpoint_request(
    request, token_uri, body, access_token=None, use_json=False
):
    """Makes a request to the OAuth 2.0 authorization server's token endpoint.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        body (Mapping[str, str]): The parameters to send in the request body.
        access_token (Optional(str)): The access token needed to make the request.
        use_json (Optional(bool)): Use urlencoded format or json format for the
            content type. The default value is False.

    Returns:
        Mapping[str, str]: The JSON-decoded response data.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    # Delegate to the non-throwing variant and convert failures to exceptions.
    ok, data = await _token_endpoint_request_no_throw(
        request, token_uri, body, access_token=access_token, use_json=use_json
    )
    if not ok:
        client._handle_error_response(data)
    return data
132
+
133
+
134
async def jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants.

    For more details, see `rfc7523 section 4`_.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        assertion (str): The OAuth 2.0 assertion.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]: The access token,
            expiration, and additional data returned by the token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.

    .. _rfc7523 section 4: https://tools.ietf.org/html/rfc7523#section-4
    """
    response_data = await _token_endpoint_request(
        request,
        token_uri,
        {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE},
    )

    try:
        token = response_data["access_token"]
    except KeyError as caught_exc:
        # Preserve the KeyError as the cause for easier debugging.
        six.raise_from(
            exceptions.RefreshError("No access token in response.", response_data),
            caught_exc,
        )

    return token, client._parse_expiry(response_data), response_data
169
+
170
+
171
async def id_token_jwt_grant(request, token_uri, assertion):
    """Implements the JWT Profile for OAuth 2.0 Authorization Grants, but
    requests an OpenID Connect ID Token instead of an access token.

    This is a variant on the standard JWT Profile that is currently unique
    to Google. This was added for the benefit of authenticating to services
    that require ID Tokens instead of access tokens or JWT bearer tokens.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorization server's token endpoint
            URI.
        assertion (str): JWT token signed by a service account. The token's
            payload must include a ``target_audience`` claim.

    Returns:
        Tuple[str, Optional[datetime], Mapping[str, str]]:
            The (encoded) Open ID Connect ID Token, expiration, and additional
            data returned by the endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    response_data = await _token_endpoint_request(
        request,
        token_uri,
        {"assertion": assertion, "grant_type": client._JWT_GRANT_TYPE},
    )

    try:
        token = response_data["id_token"]
    except KeyError as caught_exc:
        six.raise_from(
            exceptions.RefreshError("No ID token in response.", response_data),
            caught_exc,
        )

    # ID tokens carry their own expiration in the 'exp' claim; the token is
    # decoded without verification here solely to read that claim.
    claims = jwt.decode(token, verify=False)
    expiry = datetime.datetime.utcfromtimestamp(claims["exp"])

    return token, expiry, response_data
210
+
211
+
212
async def refresh_grant(
    request,
    token_uri,
    refresh_token,
    client_id,
    client_secret,
    scopes=None,
    rapt_token=None,
):
    """Implements the OAuth 2.0 refresh token grant.

    For more details, see `rfc6749 section 6`_.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorizations server's token endpoint
            URI.
        refresh_token (str): The refresh token to use to get a new access
            token.
        client_id (str): The OAuth 2.0 application's client ID.
        client_secret (str): The Oauth 2.0 appliaction's client secret.
        scopes (Optional(Sequence[str])): Scopes to request. If present, all
            scopes must be authorized for the refresh token. Useful if refresh
            token has a wild card scope (e.g.
            'https://www.googleapis.com/auth/any-api').
        rapt_token (Optional(str)): The reauth Proof Token.

    Returns:
        Tuple[str, Optional[str], Optional[datetime], Mapping[str, str]]: The
            access token, new or current refresh token, expiration, and additional data
            returned by the token endpoint.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.

    .. _rfc6749 section 6: https://tools.ietf.org/html/rfc6749#section-6
    """
    body = {
        "grant_type": client._REFRESH_GRANT_TYPE,
        "client_id": client_id,
        "client_secret": client_secret,
        "refresh_token": refresh_token,
    }
    # Optional fields are only sent when supplied.
    if scopes:
        body["scope"] = " ".join(scopes)
    if rapt_token:
        body["rapt"] = rapt_token

    response_data = await _token_endpoint_request(request, token_uri, body)
    # Token extraction is shared with the synchronous client.
    return client._handle_refresh_grant_response(response_data, refresh_token)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_credentials_async.py ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """OAuth 2.0 Async Credentials.
16
+
17
+ This module provides credentials based on OAuth 2.0 access and refresh tokens.
18
+ These credentials usually access resources on behalf of a user (resource
19
+ owner).
20
+
21
+ Specifically, this is intended to use access tokens acquired using the
22
+ `Authorization Code grant`_ and can refresh those tokens using a
23
+ optional `refresh token`_.
24
+
25
+ Obtaining the initial access and refresh token is outside of the scope of this
26
+ module. Consult `rfc6749 section 4.1`_ for complete details on the
27
+ Authorization Code grant flow.
28
+
29
+ .. _Authorization Code grant: https://tools.ietf.org/html/rfc6749#section-1.3.1
30
+ .. _refresh token: https://tools.ietf.org/html/rfc6749#section-6
31
+ .. _rfc6749 section 4.1: https://tools.ietf.org/html/rfc6749#section-4.1
32
+ """
33
+
34
+ from google.auth import _credentials_async as credentials
35
+ from google.auth import _helpers
36
+ from google.auth import exceptions
37
+ from google.oauth2 import _reauth_async as reauth
38
+ from google.oauth2 import credentials as oauth2_credentials
39
+
40
+
41
class Credentials(oauth2_credentials.Credentials):
    """Credentials using OAuth 2.0 access and refresh tokens.

    The credentials are considered immutable. If you want to modify the
    quota project, use :meth:`with_quota_project` or ::

        credentials = credentials.with_quota_project('myproject-123)
    """

    @_helpers.copy_docstring(credentials.Credentials)
    async def refresh(self, request):
        # All four fields are required to perform a refresh-token grant.
        if (
            self._refresh_token is None
            or self._token_uri is None
            or self._client_id is None
            or self._client_secret is None
        ):
            raise exceptions.RefreshError(
                "The credentials do not contain the necessary fields need to "
                "refresh the access token. You must specify refresh_token, "
                "token_uri, client_id, and client_secret."
            )

        # The reauth module handles the refresh grant, including any
        # reauthentication challenge (rapt token) flow.
        (
            access_token,
            refresh_token,
            expiry,
            grant_response,
            rapt_token,
        ) = await reauth.refresh_grant(
            request,
            self._token_uri,
            self._refresh_token,
            self._client_id,
            self._client_secret,
            scopes=self._scopes,
            rapt_token=self._rapt_token,
            enable_reauth_refresh=self._enable_reauth_refresh,
        )

        # Store the refreshed state on this credentials object.
        self.token = access_token
        self.expiry = expiry
        self._refresh_token = refresh_token
        self._id_token = grant_response.get("id_token")
        self._rapt_token = rapt_token

        # Fail loudly if the server granted fewer scopes than requested.
        if self._scopes and "scope" in grant_response:
            requested_scopes = frozenset(self._scopes)
            granted_scopes = frozenset(grant_response["scope"].split())
            scopes_requested_but_not_granted = requested_scopes - granted_scopes
            if scopes_requested_but_not_granted:
                raise exceptions.RefreshError(
                    "Not all requested scopes were granted by the "
                    "authorization server, missing scopes {}.".format(
                        ", ".join(scopes_requested_but_not_granted)
                    )
                )
+ )
98
+
99
+
100
class UserAccessTokenCredentials(oauth2_credentials.UserAccessTokenCredentials):
    """Access token credentials for user account.

    Obtain the access token for a given user account or the current active
    user account with the ``gcloud auth print-access-token`` command.

    Args:
        account (Optional[str]): Account to get the access token for. If not
            specified, the current active account will be used.
        quota_project_id (Optional[str]): The project ID used for quota
            and billing.

    """

    # No async overrides are defined here; all behavior is inherited from
    # the synchronous base class.
+ """
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/_service_account_async.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
16
+
17
+ NOTE: This file adds asynchronous refresh methods to both credentials
18
+ classes, and therefore async/await syntax is required when calling this
19
+ method when using service account credentials with asynchronous functionality.
20
+ Otherwise, all other methods are inherited from the regular service account
21
+ credentials file google.oauth2.service_account
22
+
23
+ """
24
+
25
+ from google.auth import _credentials_async as credentials_async
26
+ from google.auth import _helpers
27
+ from google.oauth2 import _client_async
28
+ from google.oauth2 import service_account
29
+
30
+
31
class Credentials(
    service_account.Credentials, credentials_async.Scoped, credentials_async.Credentials
):
    """Service account credentials

    Usually, you'll create these credentials with one of the helper
    constructors. To create credentials using a Google service account
    private key JSON file::

        credentials = _service_account_async.Credentials.from_service_account_file(
            'service-account.json')

    Or if you already have the service account file loaded::

        service_account_info = json.load(open('service_account.json'))
        credentials = _service_account_async.Credentials.from_service_account_info(
            service_account_info)

    Both helper methods pass on arguments to the constructor, so you can
    specify additional scopes and a subject if necessary::

        credentials = _service_account_async.Credentials.from_service_account_file(
            'service-account.json',
            scopes=['email'],
            subject='user@example.com')

    The credentials are considered immutable. If you want to modify the scopes
    or the subject used for delegation, use :meth:`with_scopes` or
    :meth:`with_subject`::

        scoped_credentials = credentials.with_scopes(['email'])
        delegated_credentials = credentials.with_subject(subject)

    To add a quota project, use :meth:`with_quota_project`::

        credentials = credentials.with_quota_project('myproject-123')
    """

    @_helpers.copy_docstring(credentials_async.Credentials)
    async def refresh(self, request):
        # Build the signed JWT assertion (inherited from the sync class) and
        # exchange it for an access token via the async token client.
        assertion = self._make_authorization_grant_assertion()
        access_token, expiry, _ = await _client_async.jwt_grant(
            request, self._token_uri, assertion
        )
        self.token = access_token
        self.expiry = expiry
78
+
79
class IDTokenCredentials(
    service_account.IDTokenCredentials,
    credentials_async.Signing,
    credentials_async.Credentials,
):
    """Open ID Connect ID Token-based service account credentials.

    These credentials are largely similar to :class:`.Credentials`, but instead
    of using an OAuth 2.0 Access Token as the bearer token, they use an Open
    ID Connect ID Token as the bearer token. These credentials are useful when
    communicating to services that require ID Tokens and can not accept access
    tokens.

    Usually, you'll create these credentials with one of the helper
    constructors. To create credentials using a Google service account
    private key JSON file::

        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_file(
                'service-account.json'))

    Or if you already have the service account file loaded::

        service_account_info = json.load(open('service_account.json'))
        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_info(
                service_account_info))

    Both helper methods pass on arguments to the constructor, so you can
    specify additional scopes and a subject if necessary::

        credentials = (
            _service_account_async.IDTokenCredentials.from_service_account_file(
                'service-account.json',
                scopes=['email'],
                subject='user@example.com'))

    The credentials are considered immutable. If you want to modify the scopes
    or the subject used for delegation, use :meth:`with_scopes` or
    :meth:`with_subject`::

        scoped_credentials = credentials.with_scopes(['email'])
        delegated_credentials = credentials.with_subject(subject)

    """

    @_helpers.copy_docstring(credentials_async.Credentials)
    async def refresh(self, request):
        # Build the signed JWT assertion (inherited from the sync class) and
        # exchange it for an ID token via the async token client.
        assertion = self._make_authorization_grant_assertion()
        access_token, expiry, _ = await _client_async.id_token_jwt_grant(
            request, self._token_uri, assertion
        )
        self.token = access_token
        self.expiry = expiry
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/challenges.py ADDED
@@ -0,0 +1,183 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2021 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """ Challenges for reauthentication.
16
+ """
17
+
18
+ import abc
19
+ import base64
20
+ import getpass
21
+ import sys
22
+
23
+ import six
24
+
25
+ from google.auth import _helpers
26
+ from google.auth import exceptions
27
+
28
+
29
# Origin passed to the U2F/security-key authenticator when reauthenticating
# against Google accounts (see SecurityKeyChallenge below).
REAUTH_ORIGIN = "https://accounts.google.com"
# Message raised to the user when a SAML challenge is encountered; SAML reauth
# is not supported programmatically (see SamlChallenge below).
SAML_CHALLENGE_MESSAGE = (
    "Please run `gcloud auth login` to complete reauthentication with SAML."
)
33
+
34
+
35
def get_user_password(text):
    """Prompt the user for their password on the terminal and return it.

    This is a module-level hook: override this function with different logic
    if you are using this library outside a CLI.

    Args:
        text (str): message for the password prompt.

    Returns:
        str: password string.
    """
    return getpass.getpass(text)
48
+
49
+
50
@six.add_metaclass(abc.ABCMeta)
class ReauthChallenge(object):
    """Abstract base for a single reauthentication challenge type."""

    @property
    @abc.abstractmethod
    def name(self):  # pragma: NO COVER
        """Identifier of the challenge as used by the reauth API."""
        raise NotImplementedError("name property must be implemented")

    @property
    @abc.abstractmethod
    def is_locally_eligible(self):  # pragma: NO COVER
        """Whether this challenge can be answered on this machine."""
        raise NotImplementedError("is_locally_eligible property must be implemented")

    @abc.abstractmethod
    def obtain_challenge_input(self, metadata):  # pragma: NO COVER
        """Collect the user's answer for this challenge and return it.

        Args:
            metadata (Mapping): challenge metadata returned in the
                'challenges' field in the initial reauth request. Includes the
                'challengeType' field and other challenge-specific fields.

        Returns:
            response that will be sent to the reauth service as the content of
            the 'proposalResponse' field in the request body. Usually a dict
            with keys specific to the challenge, for example
            ``{'credential': password}`` for the password challenge.
        """
        raise NotImplementedError("obtain_challenge_input method must be implemented")
82
+
83
+
84
class PasswordChallenge(ReauthChallenge):
    """Reauth challenge answered by typing the account password."""

    @property
    def name(self):
        return "PASSWORD"

    @property
    def is_locally_eligible(self):
        # A password can always be collected on the local machine.
        return True

    @_helpers.copy_docstring(ReauthChallenge)
    def obtain_challenge_input(self, unused_metadata):
        entered = get_user_password("Please enter your password:")
        # Substitute a single space when the user typed nothing, so the
        # server is never sent an empty credential.
        return {"credential": entered or " "}
101
+
102
+
103
class SecurityKeyChallenge(ReauthChallenge):
    """Challenge that asks for user's security key touch."""

    @property
    def name(self):
        # Challenge type identifier as used by the reauth API.
        return "SECURITY_KEY"

    @property
    def is_locally_eligible(self):
        return True

    @_helpers.copy_docstring(ReauthChallenge)
    def obtain_challenge_input(self, metadata):
        # pyu2f is an optional extra; import lazily so the rest of the module
        # works without it, and fail with an actionable message otherwise.
        try:
            import pyu2f.convenience.authenticator  # type: ignore
            import pyu2f.errors  # type: ignore
            import pyu2f.model  # type: ignore
        except ImportError:
            raise exceptions.ReauthFailError(
                "pyu2f dependency is required to use Security key reauth feature. "
                "It can be installed via `pip install pyu2f` or `pip install google-auth[reauth]`."
            )
        sk = metadata["securityKey"]
        challenges = sk["challenges"]
        app_id = sk["applicationId"]

        # Decode each server-provided challenge: both the key handle and the
        # challenge payload arrive urlsafe-base64 encoded.
        challenge_data = []
        for c in challenges:
            kh = c["keyHandle"].encode("ascii")
            key = pyu2f.model.RegisteredKey(bytearray(base64.urlsafe_b64decode(kh)))
            challenge = c["challenge"].encode("ascii")
            challenge = base64.urlsafe_b64decode(challenge)
            challenge_data.append({"key": key, "challenge": challenge})

        try:
            api = pyu2f.convenience.authenticator.CreateCompositeAuthenticator(
                REAUTH_ORIGIN
            )
            response = api.Authenticate(
                app_id, challenge_data, print_callback=sys.stderr.write
            )
            return {"securityKey": response}
        except pyu2f.errors.U2FError as e:
            # Ineligible-key and timeout conditions are reported on stderr and
            # fall through to return None; any other U2F error is re-raised.
            if e.code == pyu2f.errors.U2FError.DEVICE_INELIGIBLE:
                sys.stderr.write("Ineligible security key.\n")
            elif e.code == pyu2f.errors.U2FError.TIMEOUT:
                sys.stderr.write("Timed out while waiting for security key touch.\n")
            else:
                raise e
        except pyu2f.errors.NoDeviceFoundError:
            sys.stderr.write("No security key found.\n")
        # Reached only when no usable security-key response was produced.
        return None
155
+
156
+
157
class SamlChallenge(ReauthChallenge):
    """Challenge that asks the users to browse to their ID Providers.

    A SAML challenge cannot currently be completed programmatically: when the
    challenge input is requested, an exception is raised instructing the user
    to run `gcloud auth login` to reauthenticate.
    """

    @property
    def name(self):
        return "SAML"

    @property
    def is_locally_eligible(self):
        return True

    def obtain_challenge_input(self, metadata):
        # The backend does not yet return a usable redirect URL for
        # programmatic SAML users, so error out here and ask the user to
        # complete the login through gcloud instead.
        raise exceptions.ReauthSamlChallengeFailError(SAML_CHALLENGE_MESSAGE)
178
+
179
+
180
# Registry of all challenge implementations, keyed by their API challenge
# type name ("SECURITY_KEY", "PASSWORD", "SAML").
AVAILABLE_CHALLENGES = {
    challenge.name: challenge
    for challenge in [SecurityKeyChallenge(), PasswordChallenge(), SamlChallenge()]
}
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/reauth.py ADDED
@@ -0,0 +1,350 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2021 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """A module that provides functions for handling rapt authentication.
16
+
17
+ Reauth is a process of obtaining additional authentication (such as password,
18
+ security token, etc.) while refreshing OAuth 2.0 credentials for a user.
19
+
20
+ Credentials that use the Reauth flow must have the reauth scope,
21
+ ``https://www.googleapis.com/auth/accounts.reauth``.
22
+
23
+ This module provides a high-level function for executing the Reauth process,
24
+ :func:`refresh_grant`, and lower-level helpers for doing the individual
25
+ steps of the reauth process.
26
+
27
+ Those steps are:
28
+
29
+ 1. Obtaining a list of challenges from the reauth server.
30
+ 2. Running through each challenge and sending the result back to the reauth
31
+ server.
32
+ 3. Refreshing the access token using the returned rapt token.
33
+ """
34
+
35
+ import sys
36
+
37
+ from six.moves import range
38
+
39
+ from google.auth import exceptions
40
+ from google.oauth2 import _client
41
+ from google.oauth2 import challenges
42
+
43
+
44
# OAuth scope the access token must carry to talk to the reauth API.
_REAUTH_SCOPE = "https://www.googleapis.com/auth/accounts.reauth"
_REAUTH_API = "https://reauth.googleapis.com/v2/sessions"

# Token-endpoint error / error_subtype values signalling that a (new) rapt
# token is required.
_REAUTH_NEEDED_ERROR = "invalid_grant"
_REAUTH_NEEDED_ERROR_INVALID_RAPT = "invalid_rapt"
_REAUTH_NEEDED_ERROR_RAPT_REQUIRED = "rapt_required"

# Session status values returned by the reauth API.
_AUTHENTICATED = "AUTHENTICATED"
_CHALLENGE_REQUIRED = "CHALLENGE_REQUIRED"
_CHALLENGE_PENDING = "CHALLENGE_PENDING"


# Override this global variable to set custom max number of rounds of reauth
# challenges should be run.
RUN_CHALLENGE_RETRY_LIMIT = 5
59
+
60
+
61
def is_interactive():
    """Check if we are in an interactive environment.

    Override this function with a different logic if you are using this
    library outside a CLI.

    If the rapt token needs refreshing, the user needs to answer the
    challenges. If the user is not in an interactive environment, the
    challenges can not be answered and we just wait for timeout for no reason.

    Returns:
        bool: True if is interactive environment, False otherwise.
    """
    # Fix: the docstring previously misspelled "interactive" ("interractive").
    return sys.stdin.isatty()
76
+
77
+
78
def _get_challenges(
    request, supported_challenge_types, access_token, requested_scopes=None
):
    """Make the initial reauth API call listing the available challenges.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        supported_challenge_types (Sequence[str]): list of challenge names
            supported by the manager.
        access_token (str): Access token with reauth scopes.
        requested_scopes (Optional(Sequence[str])): Authorized scopes for the
            credentials.

    Returns:
        dict: The response from the reauth API.
    """
    start_url = _REAUTH_API + ":start"
    payload = {"supportedChallengeTypes": supported_challenge_types}
    if requested_scopes:
        payload["oauthScopesForDomainPolicyLookup"] = requested_scopes

    return _client._token_endpoint_request(
        request, start_url, payload, access_token=access_token, use_json=True
    )
101
+
102
+
103
def _send_challenge_result(
    request, session_id, challenge_id, client_input, access_token
):
    """Send the answer to one challenge back to the reauth API.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        session_id (str): session id returned by the initial reauth call.
        challenge_id (str): challenge id returned by the initial reauth call.
        client_input: dict with a challenge-specific client input. For
            example: ``{'credential': password}`` for password challenge.
        access_token (str): Access token with reauth scopes.

    Returns:
        dict: The response from the reauth API.
    """
    continue_url = _REAUTH_API + "/{}:continue".format(session_id)
    payload = {
        "sessionId": session_id,
        "challengeId": challenge_id,
        "action": "RESPOND",
        "proposalResponse": client_input,
    }

    return _client._token_endpoint_request(
        request, continue_url, payload, access_token=access_token, use_json=True
    )
134
+
135
+
136
def _run_next_challenge(msg, request, access_token):
    """Run the first READY challenge in ``msg`` and send its result.

    Args:
        msg (dict): Reauth API response body (either from the initial request
            to https://reauth.googleapis.com/v2/sessions:start or from sending
            the previous challenge response to
            https://reauth.googleapis.com/v2/sessions/id:continue)
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        access_token (str): reauth access token

    Returns:
        dict: The response from the reauth API, or None when the user did not
            provide an answer.

    Raises:
        google.auth.exceptions.ReauthError: if reauth failed.
    """
    for challenge in msg["challenges"]:
        # Only activated challenges can be answered.
        if challenge["status"] != "READY":
            continue

        challenge_type = challenge["challengeType"]
        handler = challenges.AVAILABLE_CHALLENGES.get(challenge_type, None)
        if not handler:
            raise exceptions.ReauthFailError(
                "Unsupported challenge type {0}. Supported types: {1}".format(
                    challenge_type,
                    ",".join(list(challenges.AVAILABLE_CHALLENGES.keys())),
                )
            )
        if not handler.is_locally_eligible:
            raise exceptions.ReauthFailError(
                "Challenge {0} is not locally eligible".format(challenge_type)
            )

        client_input = handler.obtain_challenge_input(challenge)
        if not client_input:
            return None
        return _send_challenge_result(
            request,
            msg["sessionId"],
            challenge["challengeId"],
            client_input,
            access_token,
        )
    return None
183
+
184
+
185
def _obtain_rapt(request, access_token, requested_scopes):
    """Given an http request method and reauth access token, get rapt token.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        access_token (str): reauth access token
        requested_scopes (Sequence[str]): scopes required by the client
            application

    Returns:
        str: The rapt token.

    Raises:
        google.auth.exceptions.ReauthError: if reauth failed
    """
    msg = _get_challenges(
        request,
        list(challenges.AVAILABLE_CHALLENGES.keys()),
        access_token,
        requested_scopes,
    )

    if msg["status"] == _AUTHENTICATED:
        return msg["encodedProofOfReauthToken"]

    for _ in range(0, RUN_CHALLENGE_RETRY_LIMIT):
        if not (
            msg["status"] == _CHALLENGE_REQUIRED or msg["status"] == _CHALLENGE_PENDING
        ):
            raise exceptions.ReauthFailError(
                "Reauthentication challenge failed due to API error: {}".format(
                    msg["status"]
                )
            )

        if not is_interactive():
            raise exceptions.ReauthFailError(
                "Reauthentication challenge could not be answered because you are not"
                " in an interactive session."
            )

        msg = _run_next_challenge(msg, request, access_token)

        # Fix: _run_next_challenge returns None when the user did not (or
        # could not) provide an answer; previously this fell through to
        # msg["status"] and crashed with an opaque TypeError. Raise a clear
        # reauth error instead.
        if msg is None:
            raise exceptions.ReauthFailError("Failed to obtain rapt token.")

        if msg["status"] == _AUTHENTICATED:
            return msg["encodedProofOfReauthToken"]

    # If we got here it means we didn't get authenticated.
    raise exceptions.ReauthFailError("Failed to obtain rapt token.")
233
+
234
+
235
def get_rapt_token(
    request, client_id, client_secret, refresh_token, token_uri, scopes=None
):
    """Given an http request method and refresh_token, get rapt token.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        client_id (str): client id to get access token for reauth scope.
        client_secret (str): client secret for the client_id
        refresh_token (str): refresh token to refresh access token
        token_uri (str): uri to refresh access token
        scopes (Optional(Sequence[str])): scopes required by the client
            application

    Returns:
        str: The rapt token.
    Raises:
        google.auth.exceptions.RefreshError: If reauth failed.
    """
    sys.stderr.write("Reauthentication required.\n")

    # Step 1: use the refresh token to mint an access token restricted to
    # the reauth scope.
    reauth_access_token, _, _, _ = _client.refresh_grant(
        request=request,
        client_id=client_id,
        client_secret=client_secret,
        refresh_token=refresh_token,
        token_uri=token_uri,
        scopes=[_REAUTH_SCOPE],
    )

    # Step 2: run the challenge flow against the reauth API and exchange the
    # access token for a rapt (reauth proof) token.
    return _obtain_rapt(request, reauth_access_token, requested_scopes=scopes)
270
+
271
+
272
def refresh_grant(
    request,
    token_uri,
    refresh_token,
    client_id,
    client_secret,
    scopes=None,
    rapt_token=None,
    enable_reauth_refresh=False,
):
    """Implements the reauthentication flow.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        token_uri (str): The OAuth 2.0 authorization server's token endpoint
            URI.
        refresh_token (str): The refresh token to use to get a new access
            token.
        client_id (str): The OAuth 2.0 application's client ID.
        client_secret (str): The OAuth 2.0 application's client secret.
        scopes (Optional(Sequence[str])): Scopes to request. If present, all
            scopes must be authorized for the refresh token. Useful if refresh
            token has a wild card scope (e.g.
            'https://www.googleapis.com/auth/any-api').
        rapt_token (Optional(str)): The rapt token for reauth.
        enable_reauth_refresh (Optional[bool]): Whether reauth refresh flow
            should be used. The default value is False. This option is for
            gcloud only, other users should use the default value.

    Returns:
        Tuple[str, Optional[str], Optional[datetime], Mapping[str, str], str]: The
            access token, new refresh token, expiration, the additional data
            returned by the token endpoint, and the rapt token.

    Raises:
        google.auth.exceptions.RefreshError: If the token endpoint returned
            an error.
    """
    body = {
        "grant_type": _client._REFRESH_GRANT_TYPE,
        "client_id": client_id,
        "client_secret": client_secret,
        "refresh_token": refresh_token,
    }
    if scopes:
        body["scope"] = " ".join(scopes)
    if rapt_token:
        body["rapt"] = rapt_token

    ok, response_data = _client._token_endpoint_request_no_throw(
        request, token_uri, body
    )

    # A failed refresh whose error/subtype indicate a stale or missing rapt
    # token triggers the interactive reauth flow (gcloud only), after which
    # the refresh is retried once with the fresh rapt token.
    needs_reauth = (
        not ok
        and response_data.get("error") == _REAUTH_NEEDED_ERROR
        and response_data.get("error_subtype")
        in (_REAUTH_NEEDED_ERROR_INVALID_RAPT, _REAUTH_NEEDED_ERROR_RAPT_REQUIRED)
    )
    if needs_reauth:
        if not enable_reauth_refresh:
            raise exceptions.RefreshError(
                "Reauthentication is needed. Please run `gcloud auth login --update-adc` to reauthenticate."
            )

        rapt_token = get_rapt_token(
            request, client_id, client_secret, refresh_token, token_uri, scopes=scopes
        )
        body["rapt"] = rapt_token
        ok, response_data = _client._token_endpoint_request_no_throw(
            request, token_uri, body
        )

    if not ok:
        _client._handle_error_response(response_data)
    return _client._handle_refresh_grant_response(response_data, refresh_token) + (
        rapt_token,
    )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/service_account.py ADDED
@@ -0,0 +1,687 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """Service Accounts: JSON Web Token (JWT) Profile for OAuth 2.0
16
+
17
+ This module implements the JWT Profile for OAuth 2.0 Authorization Grants
18
+ as defined by `RFC 7523`_ with particular support for how this RFC is
19
+ implemented in Google's infrastructure. Google refers to these credentials
20
+ as *Service Accounts*.
21
+
22
+ Service accounts are used for server-to-server communication, such as
23
+ interactions between a web application server and a Google service. The
24
+ service account belongs to your application instead of to an individual end
25
+ user. In contrast to other OAuth 2.0 profiles, no users are involved and your
26
+ application "acts" as the service account.
27
+
28
+ Typically an application uses a service account when the application uses
29
+ Google APIs to work with its own data rather than a user's data. For example,
30
+ an application that uses Google Cloud Datastore for data persistence would use
31
+ a service account to authenticate its calls to the Google Cloud Datastore API.
32
+ However, an application that needs to access a user's Drive documents would
33
+ use the normal OAuth 2.0 profile.
34
+
35
+ Additionally, Google Apps domain administrators can grant service accounts
36
+ `domain-wide delegation`_ authority to access user data on behalf of users in
37
+ the domain.
38
+
39
+ This profile uses a JWT to acquire an OAuth 2.0 access token. The JWT is used
40
+ in place of the usual authorization token returned during the standard
41
+ OAuth 2.0 Authorization Code grant. The JWT is only used for this purpose, as
42
+ the acquired access token is used as the bearer token when making requests
43
+ using these credentials.
44
+
45
+ This profile differs from normal OAuth 2.0 profile because no user consent
46
+ step is required. The use of the private key allows this profile to assert
47
+ identity directly.
48
+
49
+ This profile also differs from the :mod:`google.auth.jwt` authentication
50
+ because the JWT credentials use the JWT directly as the bearer token. This
51
+ profile instead only uses the JWT to obtain an OAuth 2.0 access token. The
52
+ obtained OAuth 2.0 access token is used as the bearer token.
53
+
54
+ Domain-wide delegation
55
+ ----------------------
56
+
57
+ Domain-wide delegation allows a service account to access user data on
58
+ behalf of any user in a Google Apps domain without consent from the user.
59
+ For example, an application that uses the Google Calendar API to add events to
60
+ the calendars of all users in a Google Apps domain would use a service account
61
+ to access the Google Calendar API on behalf of users.
62
+
63
+ The Google Apps administrator must explicitly authorize the service account to
64
+ do this. This authorization step is referred to as "delegating domain-wide
65
+ authority" to a service account.
66
+
67
+ You can use domain-wise delegation by creating a set of credentials with a
68
+ specific subject using :meth:`~Credentials.with_subject`.
69
+
70
+ .. _RFC 7523: https://tools.ietf.org/html/rfc7523
71
+ """
72
+
73
+ import copy
74
+ import datetime
75
+
76
+ from google.auth import _helpers
77
+ from google.auth import _service_account_info
78
+ from google.auth import credentials
79
+ from google.auth import jwt
80
+ from google.oauth2 import _client
81
+
82
# Default lifetime requested for tokens minted by these credentials.
_DEFAULT_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
# Google's OAuth 2.0 token exchange endpoint.
_GOOGLE_OAUTH2_TOKEN_ENDPOINT = "https://oauth2.googleapis.com/token"
84
+
85
+
86
+ class Credentials(
87
+ credentials.Signing, credentials.Scoped, credentials.CredentialsWithQuotaProject
88
+ ):
89
+ """Service account credentials
90
+
91
+ Usually, you'll create these credentials with one of the helper
92
+ constructors. To create credentials using a Google service account
93
+ private key JSON file::
94
+
95
+ credentials = service_account.Credentials.from_service_account_file(
96
+ 'service-account.json')
97
+
98
+ Or if you already have the service account file loaded::
99
+
100
+ service_account_info = json.load(open('service_account.json'))
101
+ credentials = service_account.Credentials.from_service_account_info(
102
+ service_account_info)
103
+
104
+ Both helper methods pass on arguments to the constructor, so you can
105
+ specify additional scopes and a subject if necessary::
106
+
107
+ credentials = service_account.Credentials.from_service_account_file(
108
+ 'service-account.json',
109
+ scopes=['email'],
110
+ subject='user@example.com')
111
+
112
+ The credentials are considered immutable. If you want to modify the scopes
113
+ or the subject used for delegation, use :meth:`with_scopes` or
114
+ :meth:`with_subject`::
115
+
116
+ scoped_credentials = credentials.with_scopes(['email'])
117
+ delegated_credentials = credentials.with_subject(subject)
118
+
119
+ To add a quota project, use :meth:`with_quota_project`::
120
+
121
+ credentials = credentials.with_quota_project('myproject-123')
122
+ """
123
+
124
+ def __init__(
125
+ self,
126
+ signer,
127
+ service_account_email,
128
+ token_uri,
129
+ scopes=None,
130
+ default_scopes=None,
131
+ subject=None,
132
+ project_id=None,
133
+ quota_project_id=None,
134
+ additional_claims=None,
135
+ always_use_jwt_access=False,
136
+ ):
137
+ """
138
+ Args:
139
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
140
+ service_account_email (str): The service account's email.
141
+ scopes (Sequence[str]): User-defined scopes to request during the
142
+ authorization grant.
143
+ default_scopes (Sequence[str]): Default scopes passed by a
144
+ Google client library. Use 'scopes' for user-defined scopes.
145
+ token_uri (str): The OAuth 2.0 Token URI.
146
+ subject (str): For domain-wide delegation, the email address of the
147
+ user to for which to request delegated access.
148
+ project_id (str): Project ID associated with the service account
149
+ credential.
150
+ quota_project_id (Optional[str]): The project ID used for quota and
151
+ billing.
152
+ additional_claims (Mapping[str, str]): Any additional claims for
153
+ the JWT assertion used in the authorization grant.
154
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
155
+ be always used.
156
+
157
+ .. note:: Typically one of the helper constructors
158
+ :meth:`from_service_account_file` or
159
+ :meth:`from_service_account_info` are used instead of calling the
160
+ constructor directly.
161
+ """
162
+ super(Credentials, self).__init__()
163
+
164
+ self._scopes = scopes
165
+ self._default_scopes = default_scopes
166
+ self._signer = signer
167
+ self._service_account_email = service_account_email
168
+ self._subject = subject
169
+ self._project_id = project_id
170
+ self._quota_project_id = quota_project_id
171
+ self._token_uri = token_uri
172
+ self._always_use_jwt_access = always_use_jwt_access
173
+
174
+ self._jwt_credentials = None
175
+
176
+ if additional_claims is not None:
177
+ self._additional_claims = additional_claims
178
+ else:
179
+ self._additional_claims = {}
180
+
181
+ @classmethod
182
+ def _from_signer_and_info(cls, signer, info, **kwargs):
183
+ """Creates a Credentials instance from a signer and service account
184
+ info.
185
+
186
+ Args:
187
+ signer (google.auth.crypt.Signer): The signer used to sign JWTs.
188
+ info (Mapping[str, str]): The service account info.
189
+ kwargs: Additional arguments to pass to the constructor.
190
+
191
+ Returns:
192
+ google.auth.jwt.Credentials: The constructed credentials.
193
+
194
+ Raises:
195
+ ValueError: If the info is not in the expected format.
196
+ """
197
+ return cls(
198
+ signer,
199
+ service_account_email=info["client_email"],
200
+ token_uri=info["token_uri"],
201
+ project_id=info.get("project_id"),
202
+ **kwargs
203
+ )
204
+
205
+ @classmethod
206
+ def from_service_account_info(cls, info, **kwargs):
207
+ """Creates a Credentials instance from parsed service account info.
208
+
209
+ Args:
210
+ info (Mapping[str, str]): The service account info in Google
211
+ format.
212
+ kwargs: Additional arguments to pass to the constructor.
213
+
214
+ Returns:
215
+ google.auth.service_account.Credentials: The constructed
216
+ credentials.
217
+
218
+ Raises:
219
+ ValueError: If the info is not in the expected format.
220
+ """
221
+ signer = _service_account_info.from_dict(
222
+ info, require=["client_email", "token_uri"]
223
+ )
224
+ return cls._from_signer_and_info(signer, info, **kwargs)
225
+
226
+ @classmethod
227
+ def from_service_account_file(cls, filename, **kwargs):
228
+ """Creates a Credentials instance from a service account json file.
229
+
230
+ Args:
231
+ filename (str): The path to the service account json file.
232
+ kwargs: Additional arguments to pass to the constructor.
233
+
234
+ Returns:
235
+ google.auth.service_account.Credentials: The constructed
236
+ credentials.
237
+ """
238
+ info, signer = _service_account_info.from_filename(
239
+ filename, require=["client_email", "token_uri"]
240
+ )
241
+ return cls._from_signer_and_info(signer, info, **kwargs)
242
+
243
    @property
    def service_account_email(self):
        """str: The service account email."""
        return self._service_account_email
247
+
248
    @property
    def project_id(self):
        """Project ID associated with this credential (None if not set)."""
        return self._project_id
252
+
253
+ @property
254
+ def requires_scopes(self):
255
+ """Checks if the credentials requires scopes.
256
+
257
+ Returns:
258
+ bool: True if there are no scopes set otherwise False.
259
+ """
260
+ return True if not self._scopes else False
261
+
262
+ @_helpers.copy_docstring(credentials.Scoped)
263
+ def with_scopes(self, scopes, default_scopes=None):
264
+ return self.__class__(
265
+ self._signer,
266
+ service_account_email=self._service_account_email,
267
+ scopes=scopes,
268
+ default_scopes=default_scopes,
269
+ token_uri=self._token_uri,
270
+ subject=self._subject,
271
+ project_id=self._project_id,
272
+ quota_project_id=self._quota_project_id,
273
+ additional_claims=self._additional_claims.copy(),
274
+ always_use_jwt_access=self._always_use_jwt_access,
275
+ )
276
+
277
+ def with_always_use_jwt_access(self, always_use_jwt_access):
278
+ """Create a copy of these credentials with the specified always_use_jwt_access value.
279
+
280
+ Args:
281
+ always_use_jwt_access (bool): Whether always use self signed JWT or not.
282
+
283
+ Returns:
284
+ google.auth.service_account.Credentials: A new credentials
285
+ instance.
286
+ """
287
+ return self.__class__(
288
+ self._signer,
289
+ service_account_email=self._service_account_email,
290
+ scopes=self._scopes,
291
+ default_scopes=self._default_scopes,
292
+ token_uri=self._token_uri,
293
+ subject=self._subject,
294
+ project_id=self._project_id,
295
+ quota_project_id=self._quota_project_id,
296
+ additional_claims=self._additional_claims.copy(),
297
+ always_use_jwt_access=always_use_jwt_access,
298
+ )
299
+
300
+ def with_subject(self, subject):
301
+ """Create a copy of these credentials with the specified subject.
302
+
303
+ Args:
304
+ subject (str): The subject claim.
305
+
306
+ Returns:
307
+ google.auth.service_account.Credentials: A new credentials
308
+ instance.
309
+ """
310
+ return self.__class__(
311
+ self._signer,
312
+ service_account_email=self._service_account_email,
313
+ scopes=self._scopes,
314
+ default_scopes=self._default_scopes,
315
+ token_uri=self._token_uri,
316
+ subject=subject,
317
+ project_id=self._project_id,
318
+ quota_project_id=self._quota_project_id,
319
+ additional_claims=self._additional_claims.copy(),
320
+ always_use_jwt_access=self._always_use_jwt_access,
321
+ )
322
+
323
+ def with_claims(self, additional_claims):
324
+ """Returns a copy of these credentials with modified claims.
325
+
326
+ Args:
327
+ additional_claims (Mapping[str, str]): Any additional claims for
328
+ the JWT payload. This will be merged with the current
329
+ additional claims.
330
+
331
+ Returns:
332
+ google.auth.service_account.Credentials: A new credentials
333
+ instance.
334
+ """
335
+ new_additional_claims = copy.deepcopy(self._additional_claims)
336
+ new_additional_claims.update(additional_claims or {})
337
+
338
+ return self.__class__(
339
+ self._signer,
340
+ service_account_email=self._service_account_email,
341
+ scopes=self._scopes,
342
+ default_scopes=self._default_scopes,
343
+ token_uri=self._token_uri,
344
+ subject=self._subject,
345
+ project_id=self._project_id,
346
+ quota_project_id=self._quota_project_id,
347
+ additional_claims=new_additional_claims,
348
+ always_use_jwt_access=self._always_use_jwt_access,
349
+ )
350
+
351
+ @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
352
+ def with_quota_project(self, quota_project_id):
353
+
354
+ return self.__class__(
355
+ self._signer,
356
+ service_account_email=self._service_account_email,
357
+ default_scopes=self._default_scopes,
358
+ scopes=self._scopes,
359
+ token_uri=self._token_uri,
360
+ subject=self._subject,
361
+ project_id=self._project_id,
362
+ quota_project_id=quota_project_id,
363
+ additional_claims=self._additional_claims.copy(),
364
+ always_use_jwt_access=self._always_use_jwt_access,
365
+ )
366
+
367
    def _make_authorization_grant_assertion(self):
        """Create the OAuth 2.0 assertion.

        This assertion is used during the OAuth 2.0 grant to acquire an
        access token.

        Returns:
            bytes: The authorization grant assertion.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
        expiry = now + lifetime

        payload = {
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
            # The issuer must be the service account email.
            "iss": self._service_account_email,
            # The audience must be the auth token endpoint's URI
            "aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
            "scope": _helpers.scopes_to_string(self._scopes or ()),
        }

        # Caller-supplied claims may overwrite any of the defaults above.
        payload.update(self._additional_claims)

        # The subject can be a user email for domain-wide delegation.
        # ``setdefault`` (rather than plain assignment) lets an explicit
        # "sub" supplied via additional_claims take precedence.
        if self._subject:
            payload.setdefault("sub", self._subject)

        token = jwt.encode(self._signer, payload)

        return token
399
+
400
    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # Since domain wide delegation doesn't work with self signed JWT. If
        # subject exists, then we should not use self signed JWT.
        if self._subject is None and self._jwt_credentials is not None:
            # Self-signed JWT path: delegate refresh to the jwt credentials
            # and mirror their token/expiry onto this instance.
            self._jwt_credentials.refresh(request)
            self.token = self._jwt_credentials.token
            self.expiry = self._jwt_credentials.expiry
        else:
            # Standard OAuth 2.0 JWT-bearer grant against the token endpoint.
            assertion = self._make_authorization_grant_assertion()
            access_token, expiry, _ = _client.jwt_grant(
                request, self._token_uri, assertion
            )
            self.token = access_token
            self.expiry = expiry
415
+
416
    def _create_self_signed_jwt(self, audience):
        """Create a self-signed JWT from the credentials if requirements are met.

        Sets ``self._jwt_credentials`` as a side effect; leaves it untouched
        when no branch below matches.

        Args:
            audience (str): The service URL. ``https://[API_ENDPOINT]/``
        """
        # https://google.aip.dev/auth/4111
        if self._always_use_jwt_access:
            # Precedence when self-signed JWTs are forced:
            # explicit scopes, then audience, then default scopes.
            if self._scopes:
                self._jwt_credentials = jwt.Credentials.from_signing_credentials(
                    self, None, additional_claims={"scope": " ".join(self._scopes)}
                )
            elif audience:
                self._jwt_credentials = jwt.Credentials.from_signing_credentials(
                    self, audience
                )
            elif self._default_scopes:
                self._jwt_credentials = jwt.Credentials.from_signing_credentials(
                    self,
                    None,
                    additional_claims={"scope": " ".join(self._default_scopes)},
                )
        elif not self._scopes and audience:
            # Without the always-use flag, only fall back to a self-signed JWT
            # when no scopes are configured and an audience is available.
            self._jwt_credentials = jwt.Credentials.from_signing_credentials(
                self, audience
            )
442
+
443
    @_helpers.copy_docstring(credentials.Signing)
    def sign_bytes(self, message):
        # Delegate to the underlying crypt signer.
        return self._signer.sign(message)
446
+
447
    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer(self):
        # The crypt signer these credentials were constructed with.
        return self._signer
451
+
452
    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer_email(self):
        # The signer identity is the service account's own email.
        return self._service_account_email
456
+
457
+
458
class IDTokenCredentials(credentials.Signing, credentials.CredentialsWithQuotaProject):
    """Open ID Connect ID Token-based service account credentials.

    These credentials are largely similar to :class:`.Credentials`, but instead
    of using an OAuth 2.0 Access Token as the bearer token, they use an Open
    ID Connect ID Token as the bearer token. These credentials are useful when
    communicating to services that require ID Tokens and can not accept access
    tokens.

    Usually, you'll create these credentials with one of the helper
    constructors. To create credentials using a Google service account
    private key JSON file::

        credentials = (
            service_account.IDTokenCredentials.from_service_account_file(
                'service-account.json'))


    Or if you already have the service account file loaded::

        service_account_info = json.load(open('service_account.json'))
        credentials = (
            service_account.IDTokenCredentials.from_service_account_info(
                service_account_info))


    Both helper methods pass on arguments to the constructor, so you can
    specify additional scopes and a subject if necessary::

        credentials = (
            service_account.IDTokenCredentials.from_service_account_file(
                'service-account.json',
                scopes=['email'],
                subject='user@example.com'))


    The credentials are considered immutable. If you want to modify the scopes
    or the subject used for delegation, use :meth:`with_scopes` or
    :meth:`with_subject`::

        scoped_credentials = credentials.with_scopes(['email'])
        delegated_credentials = credentials.with_subject(subject)

    """

    def __init__(
        self,
        signer,
        service_account_email,
        token_uri,
        target_audience,
        additional_claims=None,
        quota_project_id=None,
    ):
        """
        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            service_account_email (str): The service account's email.
            token_uri (str): The OAuth 2.0 Token URI.
            target_audience (str): The intended audience for these credentials,
                used when requesting the ID Token. The ID Token's ``aud`` claim
                will be set to this string.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT assertion used in the authorization grant.
            quota_project_id (Optional[str]): The project ID used for quota and billing.
        .. note:: Typically one of the helper constructors
            :meth:`from_service_account_file` or
            :meth:`from_service_account_info` are used instead of calling the
            constructor directly.
        """
        super(IDTokenCredentials, self).__init__()
        self._signer = signer
        self._service_account_email = service_account_email
        self._token_uri = token_uri
        self._target_audience = target_audience
        self._quota_project_id = quota_project_id

        # Default to an empty mapping so payload.update() below is always safe.
        if additional_claims is not None:
            self._additional_claims = additional_claims
        else:
            self._additional_claims = {}

    @classmethod
    def _from_signer_and_info(cls, signer, info, **kwargs):
        """Creates a credentials instance from a signer and service account
        info.

        Args:
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
            info (Mapping[str, str]): The service account info.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.jwt.IDTokenCredentials: The constructed credentials.

        Raises:
            ValueError: If the info is not in the expected format.
        """
        # Explicit kwargs win over the values parsed from the info mapping.
        kwargs.setdefault("service_account_email", info["client_email"])
        kwargs.setdefault("token_uri", info["token_uri"])
        return cls(signer, **kwargs)

    @classmethod
    def from_service_account_info(cls, info, **kwargs):
        """Creates a credentials instance from parsed service account info.

        Args:
            info (Mapping[str, str]): The service account info in Google
                format.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.service_account.IDTokenCredentials: The constructed
                credentials.

        Raises:
            ValueError: If the info is not in the expected format.
        """
        signer = _service_account_info.from_dict(
            info, require=["client_email", "token_uri"]
        )
        return cls._from_signer_and_info(signer, info, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename, **kwargs):
        """Creates a credentials instance from a service account json file.

        Args:
            filename (str): The path to the service account json file.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            google.auth.service_account.IDTokenCredentials: The constructed
                credentials.
        """
        info, signer = _service_account_info.from_filename(
            filename, require=["client_email", "token_uri"]
        )
        return cls._from_signer_and_info(signer, info, **kwargs)

    def with_target_audience(self, target_audience):
        """Create a copy of these credentials with the specified target
        audience.

        Args:
            target_audience (str): The intended audience for these credentials,
                used when requesting the ID Token.

        Returns:
            google.auth.service_account.IDTokenCredentials: A new credentials
                instance.
        """
        return self.__class__(
            self._signer,
            service_account_email=self._service_account_email,
            token_uri=self._token_uri,
            target_audience=target_audience,
            additional_claims=self._additional_claims.copy(),
            quota_project_id=self.quota_project_id,
        )

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        return self.__class__(
            self._signer,
            service_account_email=self._service_account_email,
            token_uri=self._token_uri,
            target_audience=self._target_audience,
            additional_claims=self._additional_claims.copy(),
            quota_project_id=quota_project_id,
        )

    def _make_authorization_grant_assertion(self):
        """Create the OAuth 2.0 assertion.

        This assertion is used during the OAuth 2.0 grant to acquire an
        ID token.

        Returns:
            bytes: The authorization grant assertion.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
        expiry = now + lifetime

        payload = {
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
            # The issuer must be the service account email.
            "iss": self.service_account_email,
            # The audience must be the auth token endpoint's URI
            "aud": _GOOGLE_OAUTH2_TOKEN_ENDPOINT,
            # The target audience specifies which service the ID token is
            # intended for.
            "target_audience": self._target_audience,
        }

        # Caller-supplied claims may overwrite any of the defaults above.
        payload.update(self._additional_claims)

        token = jwt.encode(self._signer, payload)

        return token

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        # Exchange the signed assertion for an ID token at the token endpoint.
        assertion = self._make_authorization_grant_assertion()
        access_token, expiry, _ = _client.id_token_jwt_grant(
            request, self._token_uri, assertion
        )
        self.token = access_token
        self.expiry = expiry

    @property
    def service_account_email(self):
        """The service account email."""
        return self._service_account_email

    @_helpers.copy_docstring(credentials.Signing)
    def sign_bytes(self, message):
        # Delegate to the underlying crypt signer.
        return self._signer.sign(message)

    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer(self):
        return self._signer

    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer_email(self):
        return self._service_account_email
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/sts.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """OAuth 2.0 Token Exchange Spec.
16
+
17
+ This module defines a token exchange utility based on the `OAuth 2.0 Token
18
+ Exchange`_ spec. This will be mainly used to exchange external credentials
19
+ for GCP access tokens in workload identity pools to access Google APIs.
20
+
21
+ The implementation will support various types of client authentication as
22
+ allowed in the spec.
23
+
24
+ A deviation on the spec will be for additional Google specific options that
25
+ cannot be easily mapped to parameters defined in the RFC.
26
+
27
+ The returned dictionary response will be based on the `rfc8693 section 2.2.1`_
28
+ spec JSON response.
29
+
30
+ .. _OAuth 2.0 Token Exchange: https://tools.ietf.org/html/rfc8693
31
+ .. _rfc8693 section 2.2.1: https://tools.ietf.org/html/rfc8693#section-2.2.1
32
+ """
33
+
34
+ import json
35
+
36
+ from six.moves import http_client
37
+ from six.moves import urllib
38
+
39
+ from google.oauth2 import utils
40
+
41
+
42
+ _URLENCODED_HEADERS = {"Content-Type": "application/x-www-form-urlencoded"}
43
+
44
+
45
class Client(utils.OAuthClientAuthHandler):
    """Implements the OAuth 2.0 token exchange spec based on
    https://tools.ietf.org/html/rfc8693.
    """

    def __init__(self, token_exchange_endpoint, client_authentication=None):
        """Initializes an STS client instance.

        Args:
            token_exchange_endpoint (str): The token exchange endpoint.
            client_authentication (Optional(google.oauth2.oauth2_utils.ClientAuthentication)):
                The optional OAuth client authentication credentials if available.
        """
        super(Client, self).__init__(client_authentication)
        self._token_exchange_endpoint = token_exchange_endpoint

    def exchange_token(
        self,
        request,
        grant_type,
        subject_token,
        subject_token_type,
        resource=None,
        audience=None,
        scopes=None,
        requested_token_type=None,
        actor_token=None,
        actor_token_type=None,
        additional_options=None,
        additional_headers=None,
    ):
        """Exchanges the provided token for another type of token based on the
        rfc8693 spec.

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
            grant_type (str): The OAuth 2.0 token exchange grant type.
            subject_token (str): The OAuth 2.0 token exchange subject token.
            subject_token_type (str): The OAuth 2.0 token exchange subject token type.
            resource (Optional[str]): The optional OAuth 2.0 token exchange resource field.
            audience (Optional[str]): The optional OAuth 2.0 token exchange audience field.
            scopes (Optional[Sequence[str]]): The optional list of scopes to use.
            requested_token_type (Optional[str]): The optional OAuth 2.0 token exchange requested
                token type.
            actor_token (Optional[str]): The optional OAuth 2.0 token exchange actor token.
            actor_token_type (Optional[str]): The optional OAuth 2.0 token exchange actor token type.
            additional_options (Optional[Mapping[str, str]]): The optional additional
                non-standard Google specific options.
            additional_headers (Optional[Mapping[str, str]]): The optional additional
                headers to pass to the token exchange endpoint.

        Returns:
            Mapping[str, str]: The token exchange JSON-decoded response data containing
                the requested token and its expiration time.

        Raises:
            google.auth.exceptions.OAuthError: If the token endpoint returned
                an error.
        """
        # Initialize request headers.
        headers = _URLENCODED_HEADERS.copy()
        # Inject additional headers; caller-provided headers can overwrite the
        # default Content-Type.
        if additional_headers:
            for k, v in dict(additional_headers).items():
                headers[k] = v
        # Initialize request body.
        request_body = {
            "grant_type": grant_type,
            "resource": resource,
            "audience": audience,
            "scope": " ".join(scopes or []),
            "requested_token_type": requested_token_type,
            "subject_token": subject_token,
            "subject_token_type": subject_token_type,
            "actor_token": actor_token,
            "actor_token_type": actor_token_type,
            "options": None,
        }
        # Add additional non-standard options.
        # NOTE(review): the JSON is URL-quoted here and then form-encoded again
        # below, i.e. effectively double-encoded — presumably what the STS
        # endpoint expects for this non-standard field; confirm before changing.
        if additional_options:
            request_body["options"] = urllib.parse.quote(json.dumps(additional_options))
        # Remove empty fields in request body. Iterate over a copy
        # (dict(request_body)) so deleting keys during the loop is safe.
        for k, v in dict(request_body).items():
            if v is None or v == "":
                del request_body[k]
        # Apply OAuth client authentication.
        self.apply_client_authentication_options(headers, request_body)

        # Execute request.
        response = request(
            url=self._token_exchange_endpoint,
            method="POST",
            headers=headers,
            body=urllib.parse.urlencode(request_body).encode("utf-8"),
        )

        # Transports may return bytes or str; normalize to str.
        response_body = (
            response.data.decode("utf-8")
            if hasattr(response.data, "decode")
            else response.data
        )

        # If non-200 response received, translate to OAuthError exception.
        if response.status != http_client.OK:
            utils.handle_error_response(response_body)

        response_data = json.loads(response_body)

        # Return successful response.
        return response_data
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/oauth2/utils.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2020 Google LLC
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ """OAuth 2.0 Utilities.
16
+
17
+ This module provides implementations for various OAuth 2.0 utilities.
18
+ This includes `OAuth error handling`_ and
19
+ `Client authentication for OAuth flows`_.
20
+
21
+ OAuth error handling
22
+ --------------------
23
+ This will define interfaces for handling OAuth related error responses as
24
+ stated in `RFC 6749 section 5.2`_.
25
+ This will include a common function to convert these HTTP error responses to a
26
+ :class:`google.auth.exceptions.OAuthError` exception.
27
+
28
+
29
+ Client authentication for OAuth flows
30
+ -------------------------------------
31
+ We introduce an interface for defining client authentication credentials based
32
+ on `RFC 6749 section 2.3.1`_. This will expose the following
33
+ capabilities:
34
+
35
+ * Ability to support basic authentication via request header.
36
+ * Ability to support bearer token authentication via request header.
37
+ * Ability to support client ID / secret authentication via request body.
38
+
39
+ .. _RFC 6749 section 2.3.1: https://tools.ietf.org/html/rfc6749#section-2.3.1
40
+ .. _RFC 6749 section 5.2: https://tools.ietf.org/html/rfc6749#section-5.2
41
+ """
42
+
43
+ import abc
44
+ import base64
45
+ import enum
46
+ import json
47
+
48
+ import six
49
+
50
+ from google.auth import exceptions
51
+
52
+
53
# OAuth client authentication based on
# https://tools.ietf.org/html/rfc6749#section-2.3.
class ClientAuthType(enum.Enum):
    """How the OAuth client proves its identity to the token endpoint."""

    basic = 1
    request_body = 2
58
+
59
+
60
class ClientAuthentication(object):
    """Defines the client authentication credentials for basic and request-body
    types based on https://tools.ietf.org/html/rfc6749#section-2.3.1.
    """

    def __init__(self, client_auth_type, client_id, client_secret=None):
        """Instantiates a client authentication object holding the client ID
        and secret credentials for basic and response-body auth.

        Args:
            client_auth_type (google.oauth2.oauth_utils.ClientAuthType): The
                client authentication type.
            client_id (str): The client ID.
            client_secret (Optional[str]): The client secret.
        """
        # Plain value object: attributes are stored as-is, no validation.
        self.client_id = client_id
        self.client_secret = client_secret
        self.client_auth_type = client_auth_type
78
+
79
+
80
@six.add_metaclass(abc.ABCMeta)
class OAuthClientAuthHandler(object):
    """Abstract class for handling client authentication in OAuth-based
    operations.
    """

    def __init__(self, client_authentication=None):
        """Instantiates an OAuth client authentication handler.

        Args:
            client_authentication (Optional[google.oauth2.utils.ClientAuthentication]):
                The OAuth client authentication credentials if available.
        """
        super(OAuthClientAuthHandler, self).__init__()
        self._client_authentication = client_authentication

    def apply_client_authentication_options(
        self, headers, request_body=None, bearer_token=None
    ):
        """Applies client authentication on the OAuth request's headers or POST
        body.

        Args:
            headers (Mapping[str, str]): The HTTP request header.
            request_body (Optional[Mapping[str, str]]): The HTTP request body
                dictionary. For requests that do not support request body, this
                is None and will be ignored.
            bearer_token (Optional[str]): The optional bearer token.
        """
        # Inject authenticated header.
        self._inject_authenticated_headers(headers, bearer_token)
        # Inject authenticated request body. A bearer token takes precedence:
        # when one is supplied, no client credentials go into the body.
        if bearer_token is None:
            self._inject_authenticated_request_body(request_body)

    def _inject_authenticated_headers(self, headers, bearer_token=None):
        # Mutates ``headers`` in place. At most one Authorization scheme is
        # written: Bearer if a token was given, else Basic if configured.
        if bearer_token is not None:
            headers["Authorization"] = "Bearer %s" % bearer_token
        elif (
            self._client_authentication is not None
            and self._client_authentication.client_auth_type is ClientAuthType.basic
        ):
            username = self._client_authentication.client_id
            # A missing secret degrades to an empty password.
            password = self._client_authentication.client_secret or ""

            credentials = base64.b64encode(
                ("%s:%s" % (username, password)).encode()
            ).decode()
            headers["Authorization"] = "Basic %s" % credentials

    def _inject_authenticated_request_body(self, request_body):
        # Only request-body style auth writes credentials into the form body;
        # doing so without a body to write into is an error.
        if (
            self._client_authentication is not None
            and self._client_authentication.client_auth_type
            is ClientAuthType.request_body
        ):
            if request_body is None:
                raise exceptions.OAuthError(
                    "HTTP request does not support request-body"
                )
            else:
                request_body["client_id"] = self._client_authentication.client_id
                request_body["client_secret"] = (
                    self._client_authentication.client_secret or ""
                )
145
+
146
+
147
def handle_error_response(response_body):
    """Translates an error response from an OAuth operation into an
    OAuthError exception.

    Args:
        response_body (str): The decoded response data.

    Raises:
        google.auth.exceptions.OAuthError
    """
    try:
        error_data = json.loads(response_body)
        # Assemble "Error code X: description - uri", omitting absent parts.
        details = "Error code {}".format(error_data["error"])
        if "error_description" in error_data:
            details += ": {}".format(error_data["error_description"])
        if "error_uri" in error_data:
            details += " - {}".format(error_data["error_uri"])
        error_details = details
    # If no details could be extracted, use the raw response data.
    except (KeyError, ValueError):
        error_details = response_body

    raise exceptions.OAuthError(error_details, response_body)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/descriptor.py ADDED
@@ -0,0 +1,1224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """Descriptors essentially contain exactly the information found in a .proto
32
+ file, in types that make this information accessible in Python.
33
+ """
34
+
35
+ __author__ = 'robinson@google.com (Will Robinson)'
36
+
37
+ import threading
38
+ import warnings
39
+
40
+ from google.protobuf.internal import api_implementation
41
+
42
# Flipped to True below when the protobuf runtime reports the C++
# implementation; downstream code branches on this to wrap the extension types.
_USE_C_DESCRIPTORS = False
if api_implementation.Type() == 'cpp':
  # Used by MakeDescriptor in cpp mode
  import binascii
  import os
  from google.protobuf.pyext import _message
  _USE_C_DESCRIPTORS = True
49
+
50
+
51
+ class Error(Exception):
52
+ """Base error for this module."""
53
+
54
+
55
+ class TypeTransformationError(Error):
56
+ """Error transforming between python proto type and corresponding C++ type."""
57
+
58
+
59
+ if _USE_C_DESCRIPTORS:
60
+ # This metaclass allows to override the behavior of code like
61
+ # isinstance(my_descriptor, FieldDescriptor)
62
+ # and make it return True when the descriptor is an instance of the extension
63
+ # type written in C++.
64
+ class DescriptorMetaclass(type):
65
+ def __instancecheck__(cls, obj):
66
+ if super(DescriptorMetaclass, cls).__instancecheck__(obj):
67
+ return True
68
+ if isinstance(obj, cls._C_DESCRIPTOR_CLASS):
69
+ return True
70
+ return False
71
+ else:
72
+ # The standard metaclass; nothing changes.
73
+ DescriptorMetaclass = type
74
+
75
+
76
+ class _Lock(object):
77
+ """Wrapper class of threading.Lock(), which is allowed by 'with'."""
78
+
79
+ def __new__(cls):
80
+ self = object.__new__(cls)
81
+ self._lock = threading.Lock() # pylint: disable=protected-access
82
+ return self
83
+
84
+ def __enter__(self):
85
+ self._lock.acquire()
86
+
87
+ def __exit__(self, exc_type, exc_value, exc_tb):
88
+ self._lock.release()
89
+
90
+
91
+ _lock = threading.Lock()
92
+
93
+
94
+ def _Deprecated(name):
95
+ if _Deprecated.count > 0:
96
+ _Deprecated.count -= 1
97
+ warnings.warn(
98
+ 'Call to deprecated create function %s(). Note: Create unlinked '
99
+ 'descriptors is going to go away. Please use get/find descriptors from '
100
+ 'generated code or query the descriptor_pool.'
101
+ % name,
102
+ category=DeprecationWarning, stacklevel=3)
103
+
104
+
105
+ # Deprecated warnings will print 100 times at most which should be enough for
106
+ # users to notice and do not cause timeout.
107
+ _Deprecated.count = 100
108
+
109
+
110
+ _internal_create_key = object()
111
+
112
+
113
+ class DescriptorBase(metaclass=DescriptorMetaclass):
114
+
115
+ """Descriptors base class.
116
+
117
+ This class is the base of all descriptor classes. It provides common options
118
+ related functionality.
119
+
120
+ Attributes:
121
+ has_options: True if the descriptor has non-default options. Usually it
122
+ is not necessary to read this -- just call GetOptions() which will
123
+ happily return the default instance. However, it's sometimes useful
124
+ for efficiency, and also useful inside the protobuf implementation to
125
+ avoid some bootstrapping issues.
126
+ """
127
+
128
+ if _USE_C_DESCRIPTORS:
129
+ # The class, or tuple of classes, that are considered as "virtual
130
+ # subclasses" of this descriptor class.
131
+ _C_DESCRIPTOR_CLASS = ()
132
+
133
+ def __init__(self, options, serialized_options, options_class_name):
134
+ """Initialize the descriptor given its options message and the name of the
135
+ class of the options message. The name of the class is required in case
136
+ the options message is None and has to be created.
137
+ """
138
+ self._options = options
139
+ self._options_class_name = options_class_name
140
+ self._serialized_options = serialized_options
141
+
142
+ # Does this descriptor have non-default options?
143
+ self.has_options = (options is not None) or (serialized_options is not None)
144
+
145
+ def _SetOptions(self, options, options_class_name):
146
+ """Sets the descriptor's options
147
+
148
+ This function is used in generated proto2 files to update descriptor
149
+ options. It must not be used outside proto2.
150
+ """
151
+ self._options = options
152
+ self._options_class_name = options_class_name
153
+
154
+ # Does this descriptor have non-default options?
155
+ self.has_options = options is not None
156
+
157
+ def GetOptions(self):
158
+ """Retrieves descriptor options.
159
+
160
+ This method returns the options set or creates the default options for the
161
+ descriptor.
162
+ """
163
+ if self._options:
164
+ return self._options
165
+
166
+ from google.protobuf import descriptor_pb2
167
+ try:
168
+ options_class = getattr(descriptor_pb2,
169
+ self._options_class_name)
170
+ except AttributeError:
171
+ raise RuntimeError('Unknown options class name %s!' %
172
+ (self._options_class_name))
173
+
174
+ with _lock:
175
+ if self._serialized_options is None:
176
+ self._options = options_class()
177
+ else:
178
+ self._options = _ParseOptions(options_class(),
179
+ self._serialized_options)
180
+
181
+ return self._options
182
+
183
+
184
+ class _NestedDescriptorBase(DescriptorBase):
185
+ """Common class for descriptors that can be nested."""
186
+
187
+ def __init__(self, options, options_class_name, name, full_name,
188
+ file, containing_type, serialized_start=None,
189
+ serialized_end=None, serialized_options=None):
190
+ """Constructor.
191
+
192
+ Args:
193
+ options: Protocol message options or None
194
+ to use default message options.
195
+ options_class_name (str): The class name of the above options.
196
+ name (str): Name of this protocol message type.
197
+ full_name (str): Fully-qualified name of this protocol message type,
198
+ which will include protocol "package" name and the name of any
199
+ enclosing types.
200
+ file (FileDescriptor): Reference to file info.
201
+ containing_type: if provided, this is a nested descriptor, with this
202
+ descriptor as parent, otherwise None.
203
+ serialized_start: The start index (inclusive) in block in the
204
+ file.serialized_pb that describes this descriptor.
205
+ serialized_end: The end index (exclusive) in block in the
206
+ file.serialized_pb that describes this descriptor.
207
+ serialized_options: Protocol message serialized options or None.
208
+ """
209
+ super(_NestedDescriptorBase, self).__init__(
210
+ options, serialized_options, options_class_name)
211
+
212
+ self.name = name
213
+ # TODO(falk): Add function to calculate full_name instead of having it in
214
+ # memory?
215
+ self.full_name = full_name
216
+ self.file = file
217
+ self.containing_type = containing_type
218
+
219
+ self._serialized_start = serialized_start
220
+ self._serialized_end = serialized_end
221
+
222
+ def CopyToProto(self, proto):
223
+ """Copies this to the matching proto in descriptor_pb2.
224
+
225
+ Args:
226
+ proto: An empty proto instance from descriptor_pb2.
227
+
228
+ Raises:
229
+ Error: If self couldn't be serialized, due to to few constructor
230
+ arguments.
231
+ """
232
+ if (self.file is not None and
233
+ self._serialized_start is not None and
234
+ self._serialized_end is not None):
235
+ proto.ParseFromString(self.file.serialized_pb[
236
+ self._serialized_start:self._serialized_end])
237
+ else:
238
+ raise Error('Descriptor does not contain serialization.')
239
+
240
+
241
+ class Descriptor(_NestedDescriptorBase):
242
+
243
+ """Descriptor for a protocol message type.
244
+
245
+ Attributes:
246
+ name (str): Name of this protocol message type.
247
+ full_name (str): Fully-qualified name of this protocol message type,
248
+ which will include protocol "package" name and the name of any
249
+ enclosing types.
250
+ containing_type (Descriptor): Reference to the descriptor of the type
251
+ containing us, or None if this is top-level.
252
+ fields (list[FieldDescriptor]): Field descriptors for all fields in
253
+ this type.
254
+ fields_by_number (dict(int, FieldDescriptor)): Same
255
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed
256
+ by "number" attribute in each FieldDescriptor.
257
+ fields_by_name (dict(str, FieldDescriptor)): Same
258
+ :class:`FieldDescriptor` objects as in :attr:`fields`, but indexed by
259
+ "name" attribute in each :class:`FieldDescriptor`.
260
+ nested_types (list[Descriptor]): Descriptor references
261
+ for all protocol message types nested within this one.
262
+ nested_types_by_name (dict(str, Descriptor)): Same Descriptor
263
+ objects as in :attr:`nested_types`, but indexed by "name" attribute
264
+ in each Descriptor.
265
+ enum_types (list[EnumDescriptor]): :class:`EnumDescriptor` references
266
+ for all enums contained within this type.
267
+ enum_types_by_name (dict(str, EnumDescriptor)): Same
268
+ :class:`EnumDescriptor` objects as in :attr:`enum_types`, but
269
+ indexed by "name" attribute in each EnumDescriptor.
270
+ enum_values_by_name (dict(str, EnumValueDescriptor)): Dict mapping
271
+ from enum value name to :class:`EnumValueDescriptor` for that value.
272
+ extensions (list[FieldDescriptor]): All extensions defined directly
273
+ within this message type (NOT within a nested type).
274
+ extensions_by_name (dict(str, FieldDescriptor)): Same FieldDescriptor
275
+ objects as :attr:`extensions`, but indexed by "name" attribute of each
276
+ FieldDescriptor.
277
+ is_extendable (bool): Does this type define any extension ranges?
278
+ oneofs (list[OneofDescriptor]): The list of descriptors for oneof fields
279
+ in this message.
280
+ oneofs_by_name (dict(str, OneofDescriptor)): Same objects as in
281
+ :attr:`oneofs`, but indexed by "name" attribute.
282
+ file (FileDescriptor): Reference to file descriptor.
283
+
284
+ """
285
+
286
+ if _USE_C_DESCRIPTORS:
287
+ _C_DESCRIPTOR_CLASS = _message.Descriptor
288
+
289
+ def __new__(
290
+ cls,
291
+ name=None,
292
+ full_name=None,
293
+ filename=None,
294
+ containing_type=None,
295
+ fields=None,
296
+ nested_types=None,
297
+ enum_types=None,
298
+ extensions=None,
299
+ options=None,
300
+ serialized_options=None,
301
+ is_extendable=True,
302
+ extension_ranges=None,
303
+ oneofs=None,
304
+ file=None, # pylint: disable=redefined-builtin
305
+ serialized_start=None,
306
+ serialized_end=None,
307
+ syntax=None,
308
+ create_key=None):
309
+ _message.Message._CheckCalledFromGeneratedFile()
310
+ return _message.default_pool.FindMessageTypeByName(full_name)
311
+
312
+ # NOTE(tmarek): The file argument redefining a builtin is nothing we can
313
+ # fix right now since we don't know how many clients already rely on the
314
+ # name of the argument.
315
+ def __init__(self, name, full_name, filename, containing_type, fields,
316
+ nested_types, enum_types, extensions, options=None,
317
+ serialized_options=None,
318
+ is_extendable=True, extension_ranges=None, oneofs=None,
319
+ file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
320
+ syntax=None, create_key=None):
321
+ """Arguments to __init__() are as described in the description
322
+ of Descriptor fields above.
323
+
324
+ Note that filename is an obsolete argument, that is not used anymore.
325
+ Please use file.name to access this as an attribute.
326
+ """
327
+ if create_key is not _internal_create_key:
328
+ _Deprecated('Descriptor')
329
+
330
+ super(Descriptor, self).__init__(
331
+ options, 'MessageOptions', name, full_name, file,
332
+ containing_type, serialized_start=serialized_start,
333
+ serialized_end=serialized_end, serialized_options=serialized_options)
334
+
335
+ # We have fields in addition to fields_by_name and fields_by_number,
336
+ # so that:
337
+ # 1. Clients can index fields by "order in which they're listed."
338
+ # 2. Clients can easily iterate over all fields with the terse
339
+ # syntax: for f in descriptor.fields: ...
340
+ self.fields = fields
341
+ for field in self.fields:
342
+ field.containing_type = self
343
+ self.fields_by_number = dict((f.number, f) for f in fields)
344
+ self.fields_by_name = dict((f.name, f) for f in fields)
345
+ self._fields_by_camelcase_name = None
346
+
347
+ self.nested_types = nested_types
348
+ for nested_type in nested_types:
349
+ nested_type.containing_type = self
350
+ self.nested_types_by_name = dict((t.name, t) for t in nested_types)
351
+
352
+ self.enum_types = enum_types
353
+ for enum_type in self.enum_types:
354
+ enum_type.containing_type = self
355
+ self.enum_types_by_name = dict((t.name, t) for t in enum_types)
356
+ self.enum_values_by_name = dict(
357
+ (v.name, v) for t in enum_types for v in t.values)
358
+
359
+ self.extensions = extensions
360
+ for extension in self.extensions:
361
+ extension.extension_scope = self
362
+ self.extensions_by_name = dict((f.name, f) for f in extensions)
363
+ self.is_extendable = is_extendable
364
+ self.extension_ranges = extension_ranges
365
+ self.oneofs = oneofs if oneofs is not None else []
366
+ self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
367
+ for oneof in self.oneofs:
368
+ oneof.containing_type = self
369
+ self.syntax = syntax or "proto2"
370
+
371
+ @property
372
+ def fields_by_camelcase_name(self):
373
+ """Same FieldDescriptor objects as in :attr:`fields`, but indexed by
374
+ :attr:`FieldDescriptor.camelcase_name`.
375
+ """
376
+ if self._fields_by_camelcase_name is None:
377
+ self._fields_by_camelcase_name = dict(
378
+ (f.camelcase_name, f) for f in self.fields)
379
+ return self._fields_by_camelcase_name
380
+
381
+ def EnumValueName(self, enum, value):
382
+ """Returns the string name of an enum value.
383
+
384
+ This is just a small helper method to simplify a common operation.
385
+
386
+ Args:
387
+ enum: string name of the Enum.
388
+ value: int, value of the enum.
389
+
390
+ Returns:
391
+ string name of the enum value.
392
+
393
+ Raises:
394
+ KeyError if either the Enum doesn't exist or the value is not a valid
395
+ value for the enum.
396
+ """
397
+ return self.enum_types_by_name[enum].values_by_number[value].name
398
+
399
+ def CopyToProto(self, proto):
400
+ """Copies this to a descriptor_pb2.DescriptorProto.
401
+
402
+ Args:
403
+ proto: An empty descriptor_pb2.DescriptorProto.
404
+ """
405
+ # This function is overridden to give a better doc comment.
406
+ super(Descriptor, self).CopyToProto(proto)
407
+
408
+
409
+ # TODO(robinson): We should have aggressive checking here,
410
+ # for example:
411
+ # * If you specify a repeated field, you should not be allowed
412
+ # to specify a default value.
413
+ # * [Other examples here as needed].
414
+ #
415
+ # TODO(robinson): for this and other *Descriptor classes, we
416
+ # might also want to lock things down aggressively (e.g.,
417
+ # prevent clients from setting the attributes). Having
418
+ # stronger invariants here in general will reduce the number
419
+ # of runtime checks we must do in reflection.py...
420
+ class FieldDescriptor(DescriptorBase):
421
+
422
+ """Descriptor for a single field in a .proto file.
423
+
424
+ Attributes:
425
+ name (str): Name of this field, exactly as it appears in .proto.
426
+ full_name (str): Name of this field, including containing scope. This is
427
+ particularly relevant for extensions.
428
+ index (int): Dense, 0-indexed index giving the order that this
429
+ field textually appears within its message in the .proto file.
430
+ number (int): Tag number declared for this field in the .proto file.
431
+
432
+ type (int): (One of the TYPE_* constants below) Declared type.
433
+ cpp_type (int): (One of the CPPTYPE_* constants below) C++ type used to
434
+ represent this field.
435
+
436
+ label (int): (One of the LABEL_* constants below) Tells whether this
437
+ field is optional, required, or repeated.
438
+ has_default_value (bool): True if this field has a default value defined,
439
+ otherwise false.
440
+ default_value (Varies): Default value of this field. Only
441
+ meaningful for non-repeated scalar fields. Repeated fields
442
+ should always set this to [], and non-repeated composite
443
+ fields should always set this to None.
444
+
445
+ containing_type (Descriptor): Descriptor of the protocol message
446
+ type that contains this field. Set by the Descriptor constructor
447
+ if we're passed into one.
448
+ Somewhat confusingly, for extension fields, this is the
449
+ descriptor of the EXTENDED message, not the descriptor
450
+ of the message containing this field. (See is_extension and
451
+ extension_scope below).
452
+ message_type (Descriptor): If a composite field, a descriptor
453
+ of the message type contained in this field. Otherwise, this is None.
454
+ enum_type (EnumDescriptor): If this field contains an enum, a
455
+ descriptor of that enum. Otherwise, this is None.
456
+
457
+ is_extension: True iff this describes an extension field.
458
+ extension_scope (Descriptor): Only meaningful if is_extension is True.
459
+ Gives the message that immediately contains this extension field.
460
+ Will be None iff we're a top-level (file-level) extension field.
461
+
462
+ options (descriptor_pb2.FieldOptions): Protocol message field options or
463
+ None to use default field options.
464
+
465
+ containing_oneof (OneofDescriptor): If the field is a member of a oneof
466
+ union, contains its descriptor. Otherwise, None.
467
+
468
+ file (FileDescriptor): Reference to file descriptor.
469
+ """
470
+
471
+ # Must be consistent with C++ FieldDescriptor::Type enum in
472
+ # descriptor.h.
473
+ #
474
+ # TODO(robinson): Find a way to eliminate this repetition.
475
+ TYPE_DOUBLE = 1
476
+ TYPE_FLOAT = 2
477
+ TYPE_INT64 = 3
478
+ TYPE_UINT64 = 4
479
+ TYPE_INT32 = 5
480
+ TYPE_FIXED64 = 6
481
+ TYPE_FIXED32 = 7
482
+ TYPE_BOOL = 8
483
+ TYPE_STRING = 9
484
+ TYPE_GROUP = 10
485
+ TYPE_MESSAGE = 11
486
+ TYPE_BYTES = 12
487
+ TYPE_UINT32 = 13
488
+ TYPE_ENUM = 14
489
+ TYPE_SFIXED32 = 15
490
+ TYPE_SFIXED64 = 16
491
+ TYPE_SINT32 = 17
492
+ TYPE_SINT64 = 18
493
+ MAX_TYPE = 18
494
+
495
+ # Must be consistent with C++ FieldDescriptor::CppType enum in
496
+ # descriptor.h.
497
+ #
498
+ # TODO(robinson): Find a way to eliminate this repetition.
499
+ CPPTYPE_INT32 = 1
500
+ CPPTYPE_INT64 = 2
501
+ CPPTYPE_UINT32 = 3
502
+ CPPTYPE_UINT64 = 4
503
+ CPPTYPE_DOUBLE = 5
504
+ CPPTYPE_FLOAT = 6
505
+ CPPTYPE_BOOL = 7
506
+ CPPTYPE_ENUM = 8
507
+ CPPTYPE_STRING = 9
508
+ CPPTYPE_MESSAGE = 10
509
+ MAX_CPPTYPE = 10
510
+
511
+ _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
512
+ TYPE_DOUBLE: CPPTYPE_DOUBLE,
513
+ TYPE_FLOAT: CPPTYPE_FLOAT,
514
+ TYPE_ENUM: CPPTYPE_ENUM,
515
+ TYPE_INT64: CPPTYPE_INT64,
516
+ TYPE_SINT64: CPPTYPE_INT64,
517
+ TYPE_SFIXED64: CPPTYPE_INT64,
518
+ TYPE_UINT64: CPPTYPE_UINT64,
519
+ TYPE_FIXED64: CPPTYPE_UINT64,
520
+ TYPE_INT32: CPPTYPE_INT32,
521
+ TYPE_SFIXED32: CPPTYPE_INT32,
522
+ TYPE_SINT32: CPPTYPE_INT32,
523
+ TYPE_UINT32: CPPTYPE_UINT32,
524
+ TYPE_FIXED32: CPPTYPE_UINT32,
525
+ TYPE_BYTES: CPPTYPE_STRING,
526
+ TYPE_STRING: CPPTYPE_STRING,
527
+ TYPE_BOOL: CPPTYPE_BOOL,
528
+ TYPE_MESSAGE: CPPTYPE_MESSAGE,
529
+ TYPE_GROUP: CPPTYPE_MESSAGE
530
+ }
531
+
532
+ # Must be consistent with C++ FieldDescriptor::Label enum in
533
+ # descriptor.h.
534
+ #
535
+ # TODO(robinson): Find a way to eliminate this repetition.
536
+ LABEL_OPTIONAL = 1
537
+ LABEL_REQUIRED = 2
538
+ LABEL_REPEATED = 3
539
+ MAX_LABEL = 3
540
+
541
+ # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
542
+ # and kLastReservedNumber in descriptor.h
543
+ MAX_FIELD_NUMBER = (1 << 29) - 1
544
+ FIRST_RESERVED_FIELD_NUMBER = 19000
545
+ LAST_RESERVED_FIELD_NUMBER = 19999
546
+
547
+ if _USE_C_DESCRIPTORS:
548
+ _C_DESCRIPTOR_CLASS = _message.FieldDescriptor
549
+
550
+ def __new__(cls, name, full_name, index, number, type, cpp_type, label,
551
+ default_value, message_type, enum_type, containing_type,
552
+ is_extension, extension_scope, options=None,
553
+ serialized_options=None,
554
+ has_default_value=True, containing_oneof=None, json_name=None,
555
+ file=None, create_key=None): # pylint: disable=redefined-builtin
556
+ _message.Message._CheckCalledFromGeneratedFile()
557
+ if is_extension:
558
+ return _message.default_pool.FindExtensionByName(full_name)
559
+ else:
560
+ return _message.default_pool.FindFieldByName(full_name)
561
+
562
+ def __init__(self, name, full_name, index, number, type, cpp_type, label,
563
+ default_value, message_type, enum_type, containing_type,
564
+ is_extension, extension_scope, options=None,
565
+ serialized_options=None,
566
+ has_default_value=True, containing_oneof=None, json_name=None,
567
+ file=None, create_key=None): # pylint: disable=redefined-builtin
568
+ """The arguments are as described in the description of FieldDescriptor
569
+ attributes above.
570
+
571
+ Note that containing_type may be None, and may be set later if necessary
572
+ (to deal with circular references between message types, for example).
573
+ Likewise for extension_scope.
574
+ """
575
+ if create_key is not _internal_create_key:
576
+ _Deprecated('FieldDescriptor')
577
+
578
+ super(FieldDescriptor, self).__init__(
579
+ options, serialized_options, 'FieldOptions')
580
+ self.name = name
581
+ self.full_name = full_name
582
+ self.file = file
583
+ self._camelcase_name = None
584
+ if json_name is None:
585
+ self.json_name = _ToJsonName(name)
586
+ else:
587
+ self.json_name = json_name
588
+ self.index = index
589
+ self.number = number
590
+ self.type = type
591
+ self.cpp_type = cpp_type
592
+ self.label = label
593
+ self.has_default_value = has_default_value
594
+ self.default_value = default_value
595
+ self.containing_type = containing_type
596
+ self.message_type = message_type
597
+ self.enum_type = enum_type
598
+ self.is_extension = is_extension
599
+ self.extension_scope = extension_scope
600
+ self.containing_oneof = containing_oneof
601
+ if api_implementation.Type() == 'cpp':
602
+ if is_extension:
603
+ self._cdescriptor = _message.default_pool.FindExtensionByName(full_name)
604
+ else:
605
+ self._cdescriptor = _message.default_pool.FindFieldByName(full_name)
606
+ else:
607
+ self._cdescriptor = None
608
+
609
+ @property
610
+ def camelcase_name(self):
611
+ """Camelcase name of this field.
612
+
613
+ Returns:
614
+ str: the name in CamelCase.
615
+ """
616
+ if self._camelcase_name is None:
617
+ self._camelcase_name = _ToCamelCase(self.name)
618
+ return self._camelcase_name
619
+
620
+ @property
621
+ def has_presence(self):
622
+ """Whether the field distinguishes between unpopulated and default values.
623
+
624
+ Raises:
625
+ RuntimeError: singular field that is not linked with message nor file.
626
+ """
627
+ if self.label == FieldDescriptor.LABEL_REPEATED:
628
+ return False
629
+ if (self.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE or
630
+ self.containing_oneof):
631
+ return True
632
+ if hasattr(self.file, 'syntax'):
633
+ return self.file.syntax == 'proto2'
634
+ if hasattr(self.message_type, 'syntax'):
635
+ return self.message_type.syntax == 'proto2'
636
+ raise RuntimeError(
637
+ 'has_presence is not ready to use because field %s is not'
638
+ ' linked with message type nor file' % self.full_name)
639
+
640
+ @staticmethod
641
+ def ProtoTypeToCppProtoType(proto_type):
642
+ """Converts from a Python proto type to a C++ Proto Type.
643
+
644
+ The Python ProtocolBuffer classes specify both the 'Python' datatype and the
645
+ 'C++' datatype - and they're not the same. This helper method should
646
+ translate from one to another.
647
+
648
+ Args:
649
+ proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
650
+ Returns:
651
+ int: descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
652
+ Raises:
653
+ TypeTransformationError: when the Python proto type isn't known.
654
+ """
655
+ try:
656
+ return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
657
+ except KeyError:
658
+ raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
659
+
660
+
661
+ class EnumDescriptor(_NestedDescriptorBase):
662
+
663
+ """Descriptor for an enum defined in a .proto file.
664
+
665
+ Attributes:
666
+ name (str): Name of the enum type.
667
+ full_name (str): Full name of the type, including package name
668
+ and any enclosing type(s).
669
+
670
+ values (list[EnumValueDescriptor]): List of the values
671
+ in this enum.
672
+ values_by_name (dict(str, EnumValueDescriptor)): Same as :attr:`values`,
673
+ but indexed by the "name" field of each EnumValueDescriptor.
674
+ values_by_number (dict(int, EnumValueDescriptor)): Same as :attr:`values`,
675
+ but indexed by the "number" field of each EnumValueDescriptor.
676
+ containing_type (Descriptor): Descriptor of the immediate containing
677
+ type of this enum, or None if this is an enum defined at the
678
+ top level in a .proto file. Set by Descriptor's constructor
679
+ if we're passed into one.
680
+ file (FileDescriptor): Reference to file descriptor.
681
+ options (descriptor_pb2.EnumOptions): Enum options message or
682
+ None to use default enum options.
683
+ """
684
+
685
+ if _USE_C_DESCRIPTORS:
686
+ _C_DESCRIPTOR_CLASS = _message.EnumDescriptor
687
+
688
+ def __new__(cls, name, full_name, filename, values,
689
+ containing_type=None, options=None,
690
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
691
+ serialized_start=None, serialized_end=None, create_key=None):
692
+ _message.Message._CheckCalledFromGeneratedFile()
693
+ return _message.default_pool.FindEnumTypeByName(full_name)
694
+
695
+ def __init__(self, name, full_name, filename, values,
696
+ containing_type=None, options=None,
697
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
698
+ serialized_start=None, serialized_end=None, create_key=None):
699
+ """Arguments are as described in the attribute description above.
700
+
701
+ Note that filename is an obsolete argument, that is not used anymore.
702
+ Please use file.name to access this as an attribute.
703
+ """
704
+ if create_key is not _internal_create_key:
705
+ _Deprecated('EnumDescriptor')
706
+
707
+ super(EnumDescriptor, self).__init__(
708
+ options, 'EnumOptions', name, full_name, file,
709
+ containing_type, serialized_start=serialized_start,
710
+ serialized_end=serialized_end, serialized_options=serialized_options)
711
+
712
+ self.values = values
713
+ for value in self.values:
714
+ value.type = self
715
+ self.values_by_name = dict((v.name, v) for v in values)
716
+ # Values are reversed to ensure that the first alias is retained.
717
+ self.values_by_number = dict((v.number, v) for v in reversed(values))
718
+
719
+ def CopyToProto(self, proto):
720
+ """Copies this to a descriptor_pb2.EnumDescriptorProto.
721
+
722
+ Args:
723
+ proto (descriptor_pb2.EnumDescriptorProto): An empty descriptor proto.
724
+ """
725
+ # This function is overridden to give a better doc comment.
726
+ super(EnumDescriptor, self).CopyToProto(proto)
727
+
728
+
729
+ class EnumValueDescriptor(DescriptorBase):
730
+
731
+ """Descriptor for a single value within an enum.
732
+
733
+ Attributes:
734
+ name (str): Name of this value.
735
+ index (int): Dense, 0-indexed index giving the order that this
736
+ value appears textually within its enum in the .proto file.
737
+ number (int): Actual number assigned to this enum value.
738
+ type (EnumDescriptor): :class:`EnumDescriptor` to which this value
739
+ belongs. Set by :class:`EnumDescriptor`'s constructor if we're
740
+ passed into one.
741
+ options (descriptor_pb2.EnumValueOptions): Enum value options message or
742
+ None to use default enum value options options.
743
+ """
744
+
745
+ if _USE_C_DESCRIPTORS:
746
+ _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
747
+
748
+ def __new__(cls, name, index, number,
749
+ type=None, # pylint: disable=redefined-builtin
750
+ options=None, serialized_options=None, create_key=None):
751
+ _message.Message._CheckCalledFromGeneratedFile()
752
+ # There is no way we can build a complete EnumValueDescriptor with the
753
+ # given parameters (the name of the Enum is not known, for example).
754
+ # Fortunately generated files just pass it to the EnumDescriptor()
755
+ # constructor, which will ignore it, so returning None is good enough.
756
+ return None
757
+
758
+ def __init__(self, name, index, number,
759
+ type=None, # pylint: disable=redefined-builtin
760
+ options=None, serialized_options=None, create_key=None):
761
+ """Arguments are as described in the attribute description above."""
762
+ if create_key is not _internal_create_key:
763
+ _Deprecated('EnumValueDescriptor')
764
+
765
+ super(EnumValueDescriptor, self).__init__(
766
+ options, serialized_options, 'EnumValueOptions')
767
+ self.name = name
768
+ self.index = index
769
+ self.number = number
770
+ self.type = type
771
+
772
+
773
+ class OneofDescriptor(DescriptorBase):
774
+ """Descriptor for a oneof field.
775
+
776
+ Attributes:
777
+ name (str): Name of the oneof field.
778
+ full_name (str): Full name of the oneof field, including package name.
779
+ index (int): 0-based index giving the order of the oneof field inside
780
+ its containing type.
781
+ containing_type (Descriptor): :class:`Descriptor` of the protocol message
782
+ type that contains this field. Set by the :class:`Descriptor` constructor
783
+ if we're passed into one.
784
+ fields (list[FieldDescriptor]): The list of field descriptors this
785
+ oneof can contain.
786
+ """
787
+
788
+ if _USE_C_DESCRIPTORS:
789
+ _C_DESCRIPTOR_CLASS = _message.OneofDescriptor
790
+
791
+ def __new__(
792
+ cls, name, full_name, index, containing_type, fields, options=None,
793
+ serialized_options=None, create_key=None):
794
+ _message.Message._CheckCalledFromGeneratedFile()
795
+ return _message.default_pool.FindOneofByName(full_name)
796
+
797
+ def __init__(
798
+ self, name, full_name, index, containing_type, fields, options=None,
799
+ serialized_options=None, create_key=None):
800
+ """Arguments are as described in the attribute description above."""
801
+ if create_key is not _internal_create_key:
802
+ _Deprecated('OneofDescriptor')
803
+
804
+ super(OneofDescriptor, self).__init__(
805
+ options, serialized_options, 'OneofOptions')
806
+ self.name = name
807
+ self.full_name = full_name
808
+ self.index = index
809
+ self.containing_type = containing_type
810
+ self.fields = fields
811
+
812
+
813
+ class ServiceDescriptor(_NestedDescriptorBase):
814
+
815
+ """Descriptor for a service.
816
+
817
+ Attributes:
818
+ name (str): Name of the service.
819
+ full_name (str): Full name of the service, including package name.
820
+ index (int): 0-indexed index giving the order that this services
821
+ definition appears within the .proto file.
822
+ methods (list[MethodDescriptor]): List of methods provided by this
823
+ service.
824
+ methods_by_name (dict(str, MethodDescriptor)): Same
825
+ :class:`MethodDescriptor` objects as in :attr:`methods_by_name`, but
826
+ indexed by "name" attribute in each :class:`MethodDescriptor`.
827
+ options (descriptor_pb2.ServiceOptions): Service options message or
828
+ None to use default service options.
829
+ file (FileDescriptor): Reference to file info.
830
+ """
831
+
832
+ if _USE_C_DESCRIPTORS:
833
+ _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
834
+
835
+ def __new__(
836
+ cls,
837
+ name=None,
838
+ full_name=None,
839
+ index=None,
840
+ methods=None,
841
+ options=None,
842
+ serialized_options=None,
843
+ file=None, # pylint: disable=redefined-builtin
844
+ serialized_start=None,
845
+ serialized_end=None,
846
+ create_key=None):
847
+ _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
848
+ return _message.default_pool.FindServiceByName(full_name)
849
+
850
+ def __init__(self, name, full_name, index, methods, options=None,
851
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
852
+ serialized_start=None, serialized_end=None, create_key=None):
853
+ if create_key is not _internal_create_key:
854
+ _Deprecated('ServiceDescriptor')
855
+
856
+ super(ServiceDescriptor, self).__init__(
857
+ options, 'ServiceOptions', name, full_name, file,
858
+ None, serialized_start=serialized_start,
859
+ serialized_end=serialized_end, serialized_options=serialized_options)
860
+ self.index = index
861
+ self.methods = methods
862
+ self.methods_by_name = dict((m.name, m) for m in methods)
863
+ # Set the containing service for each method in this service.
864
+ for method in self.methods:
865
+ method.containing_service = self
866
+
867
+ def FindMethodByName(self, name):
868
+ """Searches for the specified method, and returns its descriptor.
869
+
870
+ Args:
871
+ name (str): Name of the method.
872
+ Returns:
873
+ MethodDescriptor or None: the descriptor for the requested method, if
874
+ found.
875
+ """
876
+ return self.methods_by_name.get(name, None)
877
+
878
  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto (descriptor_pb2.ServiceDescriptorProto): An empty descriptor proto.
    """
    # This function is overridden to give a better doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)
886
+
887
+
888
class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

  Attributes:
    name (str): Name of the method within the service.
    full_name (str): Full name of method.
    index (int): 0-indexed index of the method inside the service.
    containing_service (ServiceDescriptor): The service that contains this
      method.
    input_type (Descriptor): The descriptor of the message that this method
      accepts.
    output_type (Descriptor): The descriptor of the message that this method
      returns.
    client_streaming (bool): Whether this method uses client streaming.
    server_streaming (bool): Whether this method uses server streaming.
    options (descriptor_pb2.MethodOptions or None): Method options message, or
      None to use default method options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.MethodDescriptor

    # With the C++ implementation, generated code resolves the descriptor
    # already registered in the default C++ pool instead of building one.
    def __new__(cls,
                name,
                full_name,
                index,
                containing_service,
                input_type,
                output_type,
                client_streaming=False,
                server_streaming=False,
                options=None,
                serialized_options=None,
                create_key=None):
      _message.Message._CheckCalledFromGeneratedFile()  # pylint: disable=protected-access
      return _message.default_pool.FindMethodByName(full_name)

  def __init__(self,
               name,
               full_name,
               index,
               containing_service,
               input_type,
               output_type,
               client_streaming=False,
               server_streaming=False,
               options=None,
               serialized_options=None,
               create_key=None):
    """The arguments are as described in the description of MethodDescriptor
    attributes above.

    Note that containing_service may be None, and may be set later if necessary.
    """
    if create_key is not _internal_create_key:
      _Deprecated('MethodDescriptor')

    super(MethodDescriptor, self).__init__(
        options, serialized_options, 'MethodOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_service = containing_service
    self.input_type = input_type
    self.output_type = output_type
    self.client_streaming = client_streaming
    self.server_streaming = server_streaming

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.MethodDescriptorProto.

    Args:
      proto (descriptor_pb2.MethodDescriptorProto): An empty descriptor proto.

    Raises:
      Error: If self couldn't be serialized, due to too few constructor
      arguments.
    """
    if self.containing_service is not None:
      # A method proto cannot be produced in isolation: serialize the whole
      # containing service, then pick this method's entry out by index.
      from google.protobuf import descriptor_pb2
      service_proto = descriptor_pb2.ServiceDescriptorProto()
      self.containing_service.CopyToProto(service_proto)
      proto.CopyFrom(service_proto.method[self.index])
    else:
      raise Error('Descriptor does not contain a service.')
974
+
975
+
976
class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
  :attr:`dependencies` fields are only set by the
  :py:mod:`google.protobuf.message_factory` module, and not by the generated
  proto code.

  Attributes:
    name (str): Name of file, relative to root of source tree.
    package (str): Name of the package
    syntax (str): string indicating syntax of the file (can be "proto2" or
      "proto3")
    serialized_pb (bytes): Byte string of serialized
      :class:`descriptor_pb2.FileDescriptorProto`.
    dependencies (list[FileDescriptor]): List of other :class:`FileDescriptor`
      objects this :class:`FileDescriptor` depends on.
    public_dependencies (list[FileDescriptor]): A subset of
      :attr:`dependencies`, which were declared as "public".
    message_types_by_name (dict(str, Descriptor)): Mapping from message names
      to their :class:`Descriptor`.
    enum_types_by_name (dict(str, EnumDescriptor)): Mapping from enum names to
      their :class:`EnumDescriptor`.
    extensions_by_name (dict(str, FieldDescriptor)): Mapping from extension
      names declared at file scope to their :class:`FieldDescriptor`.
    services_by_name (dict(str, ServiceDescriptor)): Mapping from services'
      names to their :class:`ServiceDescriptor`.
    pool (DescriptorPool): The pool this descriptor belongs to. When not
      passed to the constructor, the global default pool is used.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FileDescriptor

    def __new__(cls, name, package, options=None,
                serialized_options=None, serialized_pb=None,
                dependencies=None, public_dependencies=None,
                syntax=None, pool=None, create_key=None):
      # FileDescriptor() is called from various places, not only from generated
      # files, to register dynamic proto files and messages.
      # pylint: disable=g-explicit-bool-comparison
      if serialized_pb == b'':
        # Cpp generated code must be linked in if serialized_pb is ''
        try:
          return _message.default_pool.FindFileByName(name)
        except KeyError:
          raise RuntimeError('Please link in cpp generated lib for %s' % (name))
      elif serialized_pb:
        return _message.default_pool.AddSerializedFile(serialized_pb)
      else:
        # No serialized bytes: build a regular Python-level instance.
        return super(FileDescriptor, cls).__new__(cls)

  def __init__(self, name, package, options=None,
               serialized_options=None, serialized_pb=None,
               dependencies=None, public_dependencies=None,
               syntax=None, pool=None, create_key=None):
    """Constructor."""
    if create_key is not _internal_create_key:
      _Deprecated('FileDescriptor')

    super(FileDescriptor, self).__init__(
        options, serialized_options, 'FileOptions')

    if pool is None:
      # Imported lazily to avoid a module-level import cycle with
      # descriptor_pool, which imports this module.
      from google.protobuf import descriptor_pool
      pool = descriptor_pool.Default()
    self.pool = pool
    self.message_types_by_name = {}
    self.name = name
    self.package = package
    self.syntax = syntax or "proto2"
    self.serialized_pb = serialized_pb

    self.enum_types_by_name = {}
    self.extensions_by_name = {}
    self.services_by_name = {}
    self.dependencies = (dependencies or [])
    self.public_dependencies = (public_dependencies or [])

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    proto.ParseFromString(self.serialized_pb)
1062
+
1063
+
1064
+ def _ParseOptions(message, string):
1065
+ """Parses serialized options.
1066
+
1067
+ This helper function is used to parse serialized options in generated
1068
+ proto2 files. It must not be used outside proto2.
1069
+ """
1070
+ message.ParseFromString(string)
1071
+ return message
1072
+
1073
+
1074
+ def _ToCamelCase(name):
1075
+ """Converts name to camel-case and returns it."""
1076
+ capitalize_next = False
1077
+ result = []
1078
+
1079
+ for c in name:
1080
+ if c == '_':
1081
+ if result:
1082
+ capitalize_next = True
1083
+ elif capitalize_next:
1084
+ result.append(c.upper())
1085
+ capitalize_next = False
1086
+ else:
1087
+ result += c
1088
+
1089
+ # Lower-case the first letter.
1090
+ if result and result[0].isupper():
1091
+ result[0] = result[0].lower()
1092
+ return ''.join(result)
1093
+
1094
+
1095
+ def _OptionsOrNone(descriptor_proto):
1096
+ """Returns the value of the field `options`, or None if it is not set."""
1097
+ if descriptor_proto.HasField('options'):
1098
+ return descriptor_proto.options
1099
+ else:
1100
+ return None
1101
+
1102
+
1103
+ def _ToJsonName(name):
1104
+ """Converts name to Json name and returns it."""
1105
+ capitalize_next = False
1106
+ result = []
1107
+
1108
+ for c in name:
1109
+ if c == '_':
1110
+ capitalize_next = True
1111
+ elif capitalize_next:
1112
+ result.append(c.upper())
1113
+ capitalize_next = False
1114
+ else:
1115
+ result += c
1116
+
1117
+ return ''.join(result)
1118
+
1119
+
1120
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
      proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = binascii.hexlify(os.urandom(16)).decode('ascii')

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name, full_name, None, [
            EnumValueDescriptor(enum_val.name, ii, enum_val.number,
                                create_key=_internal_create_key)
            for ii, enum_val in enumerate(enum_proto.value)],
        create_key=_internal_create_key)
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.json_name:
      json_name = field_proto.json_name
    else:
      json_name = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      # Only the last component of the type name is resolved, and only against
      # this message's own nested/enum types.
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    # NOTE(review): the field's index is derived as number - 1, which assumes
    # field numbers are consecutive starting at 1 — confirm this is acceptable
    # for this helper's callers.
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=_OptionsOrNone(field_proto), has_default_value=False,
        json_name=json_name, create_key=_internal_create_key)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()), [],
                    options=_OptionsOrNone(desc_proto),
                    create_key=_internal_create_key)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/descriptor_pool.py ADDED
@@ -0,0 +1,1295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """Provides DescriptorPool to use as a container for proto2 descriptors.
32
+
33
+ The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
34
+ a collection of protocol buffer descriptors for use when dynamically creating
35
+ message types at runtime.
36
+
37
+ For most applications protocol buffers should be used via modules generated by
38
+ the protocol buffer compiler tool. This should only be used when the type of
39
+ protocol buffers used in an application or library cannot be predetermined.
40
+
41
+ Below is a straightforward example on how to use this class::
42
+
43
+ pool = DescriptorPool()
44
+ file_descriptor_protos = [ ... ]
45
+ for file_descriptor_proto in file_descriptor_protos:
46
+ pool.Add(file_descriptor_proto)
47
+ my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
48
+
49
+ The message descriptor can be used in conjunction with the message_factory
50
+ module in order to create a protocol buffer class that can be encoded and
51
+ decoded.
52
+
53
+ If you want to get a Python class for the specified proto, use the
54
+ helper functions inside google.protobuf.message_factory
55
+ directly instead of this class.
56
+ """
57
+
58
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
59
+
60
import collections
import functools
import warnings

from google.protobuf import descriptor
from google.protobuf import descriptor_database
from google.protobuf import text_encoding
66
+
67
+
68
+ _USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
69
+
70
+
71
+ def _Deprecated(func):
72
+ """Mark functions as deprecated."""
73
+
74
+ def NewFunc(*args, **kwargs):
75
+ warnings.warn(
76
+ 'Call to deprecated function %s(). Note: Do add unlinked descriptors '
77
+ 'to descriptor_pool is wrong. Use Add() or AddSerializedFile() '
78
+ 'instead.' % func.__name__,
79
+ category=DeprecationWarning)
80
+ return func(*args, **kwargs)
81
+ NewFunc.__name__ = func.__name__
82
+ NewFunc.__doc__ = func.__doc__
83
+ NewFunc.__dict__.update(func.__dict__)
84
+ return NewFunc
85
+
86
+
87
+ def _NormalizeFullyQualifiedName(name):
88
+ """Remove leading period from fully-qualified type name.
89
+
90
+ Due to b/13860351 in descriptor_database.py, types in the root namespace are
91
+ generated with a leading period. This function removes that prefix.
92
+
93
+ Args:
94
+ name (str): The fully-qualified symbol name.
95
+
96
+ Returns:
97
+ str: The normalized fully-qualified symbol name.
98
+ """
99
+ return name.lstrip('.')
100
+
101
+
102
+ def _OptionsOrNone(descriptor_proto):
103
+ """Returns the value of the field `options`, or None if it is not set."""
104
+ if descriptor_proto.HasField('options'):
105
+ return descriptor_proto.options
106
+ else:
107
+ return None
108
+
109
+
110
def _IsMessageSetExtension(field):
  """Whether field is an optional message-typed extension of a MessageSet.

  True only when the containing type carries the message_set_wire_format
  option; such extensions are additionally indexed by message type name in
  _AddExtensionDescriptor.
  """
  return (field.is_extension and
          field.containing_type.has_options and
          field.containing_type.GetOptions().message_set_wire_format and
          field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
          field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL)
116
+
117
+
118
class DescriptorPool(object):
  """A collection of protobufs dynamically constructed by descriptor protos."""

  if _USE_C_DESCRIPTORS:

    # With the C++ implementation, the pool is provided wholesale by the
    # _message extension module; the Python-level state below is unused.
    def __new__(cls, descriptor_db=None):
      # pylint: disable=protected-access
      return descriptor._message.DescriptorPool(descriptor_db)
126
+
127
  def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The descriptor_db argument to the constructor is provided to allow
    specialized file descriptor proto lookup code to be triggered on demand. An
    example would be an implementation which will read and compile a file
    specified in a call to FindFileByName() and not require the call to Add()
    at all. Results from this database will be cached internally here as well.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """

    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Caches of built descriptors, keyed by fully-qualified name
    # (file descriptors are keyed by file name).
    self._descriptors = {}
    self._enum_descriptors = {}
    self._service_descriptors = {}
    self._file_descriptors = {}
    self._toplevel_extensions = {}
    # TODO(jieluo): Remove _file_desc_by_toplevel_extension after
    # maybe year 2020 for compatibility issue (with 3.4.1 only).
    self._file_desc_by_toplevel_extension = {}
    self._top_enum_values = {}
    # We store extensions in two two-level mappings: The first key is the
    # descriptor of the message being extended, the second key is the extension
    # full name or its tag number.
    self._extensions_by_name = collections.defaultdict(dict)
    self._extensions_by_number = collections.defaultdict(dict)
156
+
157
  def _CheckConflictRegister(self, desc, desc_name, file_name):
    """Check if the descriptor name conflicts with another of the same name.

    Args:
      desc: Descriptor of a message, enum, service, extension or enum value.
      desc_name (str): the full name of desc.
      file_name (str): The file name of descriptor.

    Raises:
      TypeError: if the name is already registered for a different kind of
        descriptor or from a different file.
    """
    for register, descriptor_type in [
        (self._descriptors, descriptor.Descriptor),
        (self._enum_descriptors, descriptor.EnumDescriptor),
        (self._service_descriptors, descriptor.ServiceDescriptor),
        (self._toplevel_extensions, descriptor.FieldDescriptor),
        (self._top_enum_values, descriptor.EnumValueDescriptor)]:
      if desc_name in register:
        old_desc = register[desc_name]
        # Enum values are registered under their enum's file, reachable via
        # the value's type.
        if isinstance(old_desc, descriptor.EnumValueDescriptor):
          old_file = old_desc.type.file.name
        else:
          old_file = old_desc.file.name

        # Re-registration is only tolerated for the same kind of descriptor
        # coming from the same file; anything else is a genuine conflict.
        if not isinstance(desc, descriptor_type) or (
            old_file != file_name):
          error_msg = ('Conflict register for file "' + file_name +
                       '": ' + desc_name +
                       ' is already defined in file "' +
                       old_file + '". Please fix the conflict by adding '
                       'package name on the proto file, or use different '
                       'name for the duplication.')
          if isinstance(desc, descriptor.EnumValueDescriptor):
            error_msg += ('\nNote: enum values appear as '
                          'siblings of the enum type instead of '
                          'children of it.')

          raise TypeError(error_msg)

    return
194
+
195
  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      file_desc_proto (FileDescriptorProto): The file descriptor to add.
    """

    # The proto is only recorded in the internal database here; descriptors
    # are built lazily on first lookup (see FindFileByName).
    self._internal_db.Add(file_desc_proto)
203
+
204
  def AddSerializedFile(self, serialized_file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      serialized_file_desc_proto (bytes): A bytes string, serialization of the
        :class:`FileDescriptorProto` to add.

    Returns:
      FileDescriptor: Descriptor for the added file.
    """

    # pylint: disable=g-import-not-at-top
    from google.protobuf import descriptor_pb2
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        serialized_file_desc_proto)
    file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
    # Keep the original bytes so FileDescriptor.CopyToProto can round-trip.
    file_desc.serialized_pb = serialized_file_desc_proto
    return file_desc
222
+
223
  # Add Descriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddDescriptor(self, desc):
    """Deprecated: adds a Descriptor to the pool; use Add() instead."""
    self._AddDescriptor(desc)
228
+
229
  # Never call this method. It is for internal usage only.
  def _AddDescriptor(self, desc):
    """Adds a Descriptor to the pool, non-recursively.

    If the Descriptor contains nested messages or enums, the caller must
    explicitly register them. This method also registers the FileDescriptor
    associated with the message.

    Args:
      desc: A Descriptor.

    Raises:
      TypeError: if desc is not a descriptor.Descriptor, or the name is
        already registered for a conflicting descriptor.
    """
    if not isinstance(desc, descriptor.Descriptor):
      raise TypeError('Expected instance of descriptor.Descriptor.')

    self._CheckConflictRegister(desc, desc.full_name, desc.file.name)

    self._descriptors[desc.full_name] = desc
    self._AddFileDescriptor(desc.file)
247
+
248
  # Add EnumDescriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddEnumDescriptor(self, enum_desc):
    """Deprecated: adds an EnumDescriptor to the pool; use Add() instead."""
    self._AddEnumDescriptor(enum_desc)
253
+
254
  # Never call this method. It is for internal usage only.
  def _AddEnumDescriptor(self, enum_desc):
    """Adds an EnumDescriptor to the pool.

    This method also registers the FileDescriptor associated with the enum.

    Args:
      enum_desc: An EnumDescriptor.

    Raises:
      TypeError: if enum_desc is not a descriptor.EnumDescriptor.
    """

    if not isinstance(enum_desc, descriptor.EnumDescriptor):
      raise TypeError('Expected instance of descriptor.EnumDescriptor.')

    file_name = enum_desc.file.name
    self._CheckConflictRegister(enum_desc, enum_desc.full_name, file_name)
    self._enum_descriptors[enum_desc.full_name] = enum_desc

    # Top enum values need to be indexed.
    # Count the number of dots to see whether the enum is toplevel or nested
    # in a message. We cannot use enum_desc.containing_type at this stage.
    if enum_desc.file.package:
      top_level = (enum_desc.full_name.count('.')
                   - enum_desc.file.package.count('.') == 1)
    else:
      top_level = enum_desc.full_name.count('.') == 0
    if top_level:
      file_name = enum_desc.file.name
      package = enum_desc.file.package
      # Enum values of a top-level enum live directly in the package
      # namespace, so register each under "<package>.<value_name>".
      for enum_value in enum_desc.values:
        full_name = _NormalizeFullyQualifiedName(
            '.'.join((package, enum_value.name)))
        self._CheckConflictRegister(enum_value, full_name, file_name)
        self._top_enum_values[full_name] = enum_value
    self._AddFileDescriptor(enum_desc.file)
288
+
289
  # Add ServiceDescriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddServiceDescriptor(self, service_desc):
    """Deprecated: adds a ServiceDescriptor to the pool; use Add() instead."""
    self._AddServiceDescriptor(service_desc)
294
+
295
  # Never call this method. It is for internal usage only.
  def _AddServiceDescriptor(self, service_desc):
    """Adds a ServiceDescriptor to the pool.

    Args:
      service_desc: A ServiceDescriptor.

    Raises:
      TypeError: if service_desc is not a descriptor.ServiceDescriptor.
    """

    if not isinstance(service_desc, descriptor.ServiceDescriptor):
      raise TypeError('Expected instance of descriptor.ServiceDescriptor.')

    self._CheckConflictRegister(service_desc, service_desc.full_name,
                                service_desc.file.name)
    self._service_descriptors[service_desc.full_name] = service_desc
309
+
310
  # Add ExtensionDescriptor to descriptor pool is deprecated. Please use Add()
  # or AddSerializedFile() to add a FileDescriptorProto instead.
  @_Deprecated
  def AddExtensionDescriptor(self, extension):
    """Deprecated: adds an extension FieldDescriptor; use Add() instead."""
    self._AddExtensionDescriptor(extension)
315
+
316
  # Never call this method. It is for internal usage only.
  def _AddExtensionDescriptor(self, extension):
    """Adds a FieldDescriptor describing an extension to the pool.

    Args:
      extension: A FieldDescriptor.

    Raises:
      AssertionError: when another extension with the same number extends the
        same message.
      TypeError: when the specified extension is not a
        descriptor.FieldDescriptor.
    """
    if not (isinstance(extension, descriptor.FieldDescriptor) and
            extension.is_extension):
      raise TypeError('Expected an extension descriptor.')

    # Extensions declared at file scope (no enclosing message) are also
    # indexed by their own full name.
    if extension.extension_scope is None:
      self._toplevel_extensions[extension.full_name] = extension

    # Registering the identical descriptor twice is a no-op; a *different*
    # descriptor claiming the same (message, number) slot is an error.
    try:
      existing_desc = self._extensions_by_number[
          extension.containing_type][extension.number]
    except KeyError:
      pass
    else:
      if extension is not existing_desc:
        raise AssertionError(
            'Extensions "%s" and "%s" both try to extend message type "%s" '
            'with field number %d.' %
            (extension.full_name, existing_desc.full_name,
             extension.containing_type.full_name, extension.number))

    self._extensions_by_number[extension.containing_type][
        extension.number] = extension
    self._extensions_by_name[extension.containing_type][
        extension.full_name] = extension

    # Also register MessageSet extensions with the type name.
    if _IsMessageSetExtension(extension):
      self._extensions_by_name[extension.containing_type][
          extension.message_type.full_name] = extension
358
+
359
  @_Deprecated
  def AddFileDescriptor(self, file_desc):
    """Deprecated: adds a FileDescriptor to the pool; use Add() instead."""
    self._InternalAddFileDescriptor(file_desc)
362
+
363
  # Never call this method. It is for internal usage only.
  def _InternalAddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.
    """

    self._AddFileDescriptor(file_desc)
    # TODO(jieluo): This is a temporary solution for FieldDescriptor.file.
    # FieldDescriptor.file is added in code gen. Remove this solution after
    # maybe 2020 for compatibility reason (with 3.4.1 only).
    for extension in file_desc.extensions_by_name.values():
      self._file_desc_by_toplevel_extension[
          extension.full_name] = file_desc
381
+
382
  def _AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.

    Raises:
      TypeError: if file_desc is not a descriptor.FileDescriptor.
    """

    if not isinstance(file_desc, descriptor.FileDescriptor):
      raise TypeError('Expected instance of descriptor.FileDescriptor.')
    self._file_descriptors[file_desc.name] = file_desc
395
+
396
+ def FindFileByName(self, file_name):
397
+ """Gets a FileDescriptor by file name.
398
+
399
+ Args:
400
+ file_name (str): The path to the file to get a descriptor for.
401
+
402
+ Returns:
403
+ FileDescriptor: The descriptor for the named file.
404
+
405
+ Raises:
406
+ KeyError: if the file cannot be found in the pool.
407
+ """
408
+
409
+ try:
410
+ return self._file_descriptors[file_name]
411
+ except KeyError:
412
+ pass
413
+
414
+ try:
415
+ file_proto = self._internal_db.FindFileByName(file_name)
416
+ except KeyError as error:
417
+ if self._descriptor_db:
418
+ file_proto = self._descriptor_db.FindFileByName(file_name)
419
+ else:
420
+ raise error
421
+ if not file_proto:
422
+ raise KeyError('Cannot find a file named %s' % file_name)
423
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
424
+
425
+ def FindFileContainingSymbol(self, symbol):
426
+ """Gets the FileDescriptor for the file containing the specified symbol.
427
+
428
+ Args:
429
+ symbol (str): The name of the symbol to search for.
430
+
431
+ Returns:
432
+ FileDescriptor: Descriptor for the file that contains the specified
433
+ symbol.
434
+
435
+ Raises:
436
+ KeyError: if the file cannot be found in the pool.
437
+ """
438
+
439
+ symbol = _NormalizeFullyQualifiedName(symbol)
440
+ try:
441
+ return self._InternalFindFileContainingSymbol(symbol)
442
+ except KeyError:
443
+ pass
444
+
445
+ try:
446
+ # Try fallback database. Build and find again if possible.
447
+ self._FindFileContainingSymbolInDb(symbol)
448
+ return self._InternalFindFileContainingSymbol(symbol)
449
+ except KeyError:
450
+ raise KeyError('Cannot find a file containing %s' % symbol)
451
+
452
+ def _InternalFindFileContainingSymbol(self, symbol):
453
+ """Gets the already built FileDescriptor containing the specified symbol.
454
+
455
+ Args:
456
+ symbol (str): The name of the symbol to search for.
457
+
458
+ Returns:
459
+ FileDescriptor: Descriptor for the file that contains the specified
460
+ symbol.
461
+
462
+ Raises:
463
+ KeyError: if the file cannot be found in the pool.
464
+ """
465
+ try:
466
+ return self._descriptors[symbol].file
467
+ except KeyError:
468
+ pass
469
+
470
+ try:
471
+ return self._enum_descriptors[symbol].file
472
+ except KeyError:
473
+ pass
474
+
475
+ try:
476
+ return self._service_descriptors[symbol].file
477
+ except KeyError:
478
+ pass
479
+
480
+ try:
481
+ return self._top_enum_values[symbol].type.file
482
+ except KeyError:
483
+ pass
484
+
485
+ try:
486
+ return self._file_desc_by_toplevel_extension[symbol]
487
+ except KeyError:
488
+ pass
489
+
490
+ # Try fields, enum values and nested extensions inside a message.
491
+ top_name, _, sub_name = symbol.rpartition('.')
492
+ try:
493
+ message = self.FindMessageTypeByName(top_name)
494
+ assert (sub_name in message.extensions_by_name or
495
+ sub_name in message.fields_by_name or
496
+ sub_name in message.enum_values_by_name)
497
+ return message.file
498
+ except (KeyError, AssertionError):
499
+ raise KeyError('Cannot find a file containing %s' % symbol)
500
+
501
+ def FindMessageTypeByName(self, full_name):
502
+ """Loads the named descriptor from the pool.
503
+
504
+ Args:
505
+ full_name (str): The full name of the descriptor to load.
506
+
507
+ Returns:
508
+ Descriptor: The descriptor for the named type.
509
+
510
+ Raises:
511
+ KeyError: if the message cannot be found in the pool.
512
+ """
513
+
514
+ full_name = _NormalizeFullyQualifiedName(full_name)
515
+ if full_name not in self._descriptors:
516
+ self._FindFileContainingSymbolInDb(full_name)
517
+ return self._descriptors[full_name]
518
+
519
+ def FindEnumTypeByName(self, full_name):
520
+ """Loads the named enum descriptor from the pool.
521
+
522
+ Args:
523
+ full_name (str): The full name of the enum descriptor to load.
524
+
525
+ Returns:
526
+ EnumDescriptor: The enum descriptor for the named type.
527
+
528
+ Raises:
529
+ KeyError: if the enum cannot be found in the pool.
530
+ """
531
+
532
+ full_name = _NormalizeFullyQualifiedName(full_name)
533
+ if full_name not in self._enum_descriptors:
534
+ self._FindFileContainingSymbolInDb(full_name)
535
+ return self._enum_descriptors[full_name]
536
+
537
+ def FindFieldByName(self, full_name):
538
+ """Loads the named field descriptor from the pool.
539
+
540
+ Args:
541
+ full_name (str): The full name of the field descriptor to load.
542
+
543
+ Returns:
544
+ FieldDescriptor: The field descriptor for the named field.
545
+
546
+ Raises:
547
+ KeyError: if the field cannot be found in the pool.
548
+ """
549
+ full_name = _NormalizeFullyQualifiedName(full_name)
550
+ message_name, _, field_name = full_name.rpartition('.')
551
+ message_descriptor = self.FindMessageTypeByName(message_name)
552
+ return message_descriptor.fields_by_name[field_name]
553
+
554
+ def FindOneofByName(self, full_name):
555
+ """Loads the named oneof descriptor from the pool.
556
+
557
+ Args:
558
+ full_name (str): The full name of the oneof descriptor to load.
559
+
560
+ Returns:
561
+ OneofDescriptor: The oneof descriptor for the named oneof.
562
+
563
+ Raises:
564
+ KeyError: if the oneof cannot be found in the pool.
565
+ """
566
+ full_name = _NormalizeFullyQualifiedName(full_name)
567
+ message_name, _, oneof_name = full_name.rpartition('.')
568
+ message_descriptor = self.FindMessageTypeByName(message_name)
569
+ return message_descriptor.oneofs_by_name[oneof_name]
570
+
571
+ def FindExtensionByName(self, full_name):
572
+ """Loads the named extension descriptor from the pool.
573
+
574
+ Args:
575
+ full_name (str): The full name of the extension descriptor to load.
576
+
577
+ Returns:
578
+ FieldDescriptor: The field descriptor for the named extension.
579
+
580
+ Raises:
581
+ KeyError: if the extension cannot be found in the pool.
582
+ """
583
+ full_name = _NormalizeFullyQualifiedName(full_name)
584
+ try:
585
+ # The proto compiler does not give any link between the FileDescriptor
586
+ # and top-level extensions unless the FileDescriptorProto is added to
587
+ # the DescriptorDatabase, but this can impact memory usage.
588
+ # So we registered these extensions by name explicitly.
589
+ return self._toplevel_extensions[full_name]
590
+ except KeyError:
591
+ pass
592
+ message_name, _, extension_name = full_name.rpartition('.')
593
+ try:
594
+ # Most extensions are nested inside a message.
595
+ scope = self.FindMessageTypeByName(message_name)
596
+ except KeyError:
597
+ # Some extensions are defined at file scope.
598
+ scope = self._FindFileContainingSymbolInDb(full_name)
599
+ return scope.extensions_by_name[extension_name]
600
+
601
+ def FindExtensionByNumber(self, message_descriptor, number):
602
+ """Gets the extension of the specified message with the specified number.
603
+
604
+ Extensions have to be registered to this pool by calling :func:`Add` or
605
+ :func:`AddExtensionDescriptor`.
606
+
607
+ Args:
608
+ message_descriptor (Descriptor): descriptor of the extended message.
609
+ number (int): Number of the extension field.
610
+
611
+ Returns:
612
+ FieldDescriptor: The descriptor for the extension.
613
+
614
+ Raises:
615
+ KeyError: when no extension with the given number is known for the
616
+ specified message.
617
+ """
618
+ try:
619
+ return self._extensions_by_number[message_descriptor][number]
620
+ except KeyError:
621
+ self._TryLoadExtensionFromDB(message_descriptor, number)
622
+ return self._extensions_by_number[message_descriptor][number]
623
+
624
+ def FindAllExtensions(self, message_descriptor):
625
+ """Gets all the known extensions of a given message.
626
+
627
+ Extensions have to be registered to this pool by build related
628
+ :func:`Add` or :func:`AddExtensionDescriptor`.
629
+
630
+ Args:
631
+ message_descriptor (Descriptor): Descriptor of the extended message.
632
+
633
+ Returns:
634
+ list[FieldDescriptor]: Field descriptors describing the extensions.
635
+ """
636
+ # Fallback to descriptor db if FindAllExtensionNumbers is provided.
637
+ if self._descriptor_db and hasattr(
638
+ self._descriptor_db, 'FindAllExtensionNumbers'):
639
+ full_name = message_descriptor.full_name
640
+ all_numbers = self._descriptor_db.FindAllExtensionNumbers(full_name)
641
+ for number in all_numbers:
642
+ if number in self._extensions_by_number[message_descriptor]:
643
+ continue
644
+ self._TryLoadExtensionFromDB(message_descriptor, number)
645
+
646
+ return list(self._extensions_by_number[message_descriptor].values())
647
+
648
+ def _TryLoadExtensionFromDB(self, message_descriptor, number):
649
+ """Try to Load extensions from descriptor db.
650
+
651
+ Args:
652
+ message_descriptor: descriptor of the extended message.
653
+ number: the extension number that needs to be loaded.
654
+ """
655
+ if not self._descriptor_db:
656
+ return
657
+ # Only supported when FindFileContainingExtension is provided.
658
+ if not hasattr(
659
+ self._descriptor_db, 'FindFileContainingExtension'):
660
+ return
661
+
662
+ full_name = message_descriptor.full_name
663
+ file_proto = self._descriptor_db.FindFileContainingExtension(
664
+ full_name, number)
665
+
666
+ if file_proto is None:
667
+ return
668
+
669
+ try:
670
+ self._ConvertFileProtoToFileDescriptor(file_proto)
671
+ except:
672
+ warn_msg = ('Unable to load proto file %s for extension number %d.' %
673
+ (file_proto.name, number))
674
+ warnings.warn(warn_msg, RuntimeWarning)
675
+
676
+ def FindServiceByName(self, full_name):
677
+ """Loads the named service descriptor from the pool.
678
+
679
+ Args:
680
+ full_name (str): The full name of the service descriptor to load.
681
+
682
+ Returns:
683
+ ServiceDescriptor: The service descriptor for the named service.
684
+
685
+ Raises:
686
+ KeyError: if the service cannot be found in the pool.
687
+ """
688
+ full_name = _NormalizeFullyQualifiedName(full_name)
689
+ if full_name not in self._service_descriptors:
690
+ self._FindFileContainingSymbolInDb(full_name)
691
+ return self._service_descriptors[full_name]
692
+
693
+ def FindMethodByName(self, full_name):
694
+ """Loads the named service method descriptor from the pool.
695
+
696
+ Args:
697
+ full_name (str): The full name of the method descriptor to load.
698
+
699
+ Returns:
700
+ MethodDescriptor: The method descriptor for the service method.
701
+
702
+ Raises:
703
+ KeyError: if the method cannot be found in the pool.
704
+ """
705
+ full_name = _NormalizeFullyQualifiedName(full_name)
706
+ service_name, _, method_name = full_name.rpartition('.')
707
+ service_descriptor = self.FindServiceByName(service_name)
708
+ return service_descriptor.methods_by_name[method_name]
709
+
710
+ def _FindFileContainingSymbolInDb(self, symbol):
711
+ """Finds the file in descriptor DB containing the specified symbol.
712
+
713
+ Args:
714
+ symbol (str): The name of the symbol to search for.
715
+
716
+ Returns:
717
+ FileDescriptor: The file that contains the specified symbol.
718
+
719
+ Raises:
720
+ KeyError: if the file cannot be found in the descriptor database.
721
+ """
722
+ try:
723
+ file_proto = self._internal_db.FindFileContainingSymbol(symbol)
724
+ except KeyError as error:
725
+ if self._descriptor_db:
726
+ file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
727
+ else:
728
+ raise error
729
+ if not file_proto:
730
+ raise KeyError('Cannot find a file containing %s' % symbol)
731
+ return self._ConvertFileProtoToFileDescriptor(file_proto)
732
+
733
+ def _ConvertFileProtoToFileDescriptor(self, file_proto):
734
+ """Creates a FileDescriptor from a proto or returns a cached copy.
735
+
736
+ This method also has the side effect of loading all the symbols found in
737
+ the file into the appropriate dictionaries in the pool.
738
+
739
+ Args:
740
+ file_proto: The proto to convert.
741
+
742
+ Returns:
743
+ A FileDescriptor matching the passed in proto.
744
+ """
745
+ if file_proto.name not in self._file_descriptors:
746
+ built_deps = list(self._GetDeps(file_proto.dependency))
747
+ direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
748
+ public_deps = [direct_deps[i] for i in file_proto.public_dependency]
749
+
750
+ file_descriptor = descriptor.FileDescriptor(
751
+ pool=self,
752
+ name=file_proto.name,
753
+ package=file_proto.package,
754
+ syntax=file_proto.syntax,
755
+ options=_OptionsOrNone(file_proto),
756
+ serialized_pb=file_proto.SerializeToString(),
757
+ dependencies=direct_deps,
758
+ public_dependencies=public_deps,
759
+ # pylint: disable=protected-access
760
+ create_key=descriptor._internal_create_key)
761
+ scope = {}
762
+
763
+ # This loop extracts all the message and enum types from all the
764
+ # dependencies of the file_proto. This is necessary to create the
765
+ # scope of available message types when defining the passed in
766
+ # file proto.
767
+ for dependency in built_deps:
768
+ scope.update(self._ExtractSymbols(
769
+ dependency.message_types_by_name.values()))
770
+ scope.update((_PrefixWithDot(enum.full_name), enum)
771
+ for enum in dependency.enum_types_by_name.values())
772
+
773
+ for message_type in file_proto.message_type:
774
+ message_desc = self._ConvertMessageDescriptor(
775
+ message_type, file_proto.package, file_descriptor, scope,
776
+ file_proto.syntax)
777
+ file_descriptor.message_types_by_name[message_desc.name] = (
778
+ message_desc)
779
+
780
+ for enum_type in file_proto.enum_type:
781
+ file_descriptor.enum_types_by_name[enum_type.name] = (
782
+ self._ConvertEnumDescriptor(enum_type, file_proto.package,
783
+ file_descriptor, None, scope, True))
784
+
785
+ for index, extension_proto in enumerate(file_proto.extension):
786
+ extension_desc = self._MakeFieldDescriptor(
787
+ extension_proto, file_proto.package, index, file_descriptor,
788
+ is_extension=True)
789
+ extension_desc.containing_type = self._GetTypeFromScope(
790
+ file_descriptor.package, extension_proto.extendee, scope)
791
+ self._SetFieldType(extension_proto, extension_desc,
792
+ file_descriptor.package, scope)
793
+ file_descriptor.extensions_by_name[extension_desc.name] = (
794
+ extension_desc)
795
+ self._file_desc_by_toplevel_extension[extension_desc.full_name] = (
796
+ file_descriptor)
797
+
798
+ for desc_proto in file_proto.message_type:
799
+ self._SetAllFieldTypes(file_proto.package, desc_proto, scope)
800
+
801
+ if file_proto.package:
802
+ desc_proto_prefix = _PrefixWithDot(file_proto.package)
803
+ else:
804
+ desc_proto_prefix = ''
805
+
806
+ for desc_proto in file_proto.message_type:
807
+ desc = self._GetTypeFromScope(
808
+ desc_proto_prefix, desc_proto.name, scope)
809
+ file_descriptor.message_types_by_name[desc_proto.name] = desc
810
+
811
+ for index, service_proto in enumerate(file_proto.service):
812
+ file_descriptor.services_by_name[service_proto.name] = (
813
+ self._MakeServiceDescriptor(service_proto, index, scope,
814
+ file_proto.package, file_descriptor))
815
+
816
+ self._file_descriptors[file_proto.name] = file_descriptor
817
+
818
+ # Add extensions to the pool
819
+ file_desc = self._file_descriptors[file_proto.name]
820
+ for extension in file_desc.extensions_by_name.values():
821
+ self._AddExtensionDescriptor(extension)
822
+ for message_type in file_desc.message_types_by_name.values():
823
+ for extension in message_type.extensions:
824
+ self._AddExtensionDescriptor(extension)
825
+
826
+ return file_desc
827
+
828
+ def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
829
+ scope=None, syntax=None):
830
+ """Adds the proto to the pool in the specified package.
831
+
832
+ Args:
833
+ desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
834
+ package: The package the proto should be located in.
835
+ file_desc: The file containing this message.
836
+ scope: Dict mapping short and full symbols to message and enum types.
837
+ syntax: string indicating syntax of the file ("proto2" or "proto3")
838
+
839
+ Returns:
840
+ The added descriptor.
841
+ """
842
+
843
+ if package:
844
+ desc_name = '.'.join((package, desc_proto.name))
845
+ else:
846
+ desc_name = desc_proto.name
847
+
848
+ if file_desc is None:
849
+ file_name = None
850
+ else:
851
+ file_name = file_desc.name
852
+
853
+ if scope is None:
854
+ scope = {}
855
+
856
+ nested = [
857
+ self._ConvertMessageDescriptor(
858
+ nested, desc_name, file_desc, scope, syntax)
859
+ for nested in desc_proto.nested_type]
860
+ enums = [
861
+ self._ConvertEnumDescriptor(enum, desc_name, file_desc, None,
862
+ scope, False)
863
+ for enum in desc_proto.enum_type]
864
+ fields = [self._MakeFieldDescriptor(field, desc_name, index, file_desc)
865
+ for index, field in enumerate(desc_proto.field)]
866
+ extensions = [
867
+ self._MakeFieldDescriptor(extension, desc_name, index, file_desc,
868
+ is_extension=True)
869
+ for index, extension in enumerate(desc_proto.extension)]
870
+ oneofs = [
871
+ # pylint: disable=g-complex-comprehension
872
+ descriptor.OneofDescriptor(
873
+ desc.name,
874
+ '.'.join((desc_name, desc.name)),
875
+ index,
876
+ None,
877
+ [],
878
+ _OptionsOrNone(desc),
879
+ # pylint: disable=protected-access
880
+ create_key=descriptor._internal_create_key)
881
+ for index, desc in enumerate(desc_proto.oneof_decl)
882
+ ]
883
+ extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
884
+ if extension_ranges:
885
+ is_extendable = True
886
+ else:
887
+ is_extendable = False
888
+ desc = descriptor.Descriptor(
889
+ name=desc_proto.name,
890
+ full_name=desc_name,
891
+ filename=file_name,
892
+ containing_type=None,
893
+ fields=fields,
894
+ oneofs=oneofs,
895
+ nested_types=nested,
896
+ enum_types=enums,
897
+ extensions=extensions,
898
+ options=_OptionsOrNone(desc_proto),
899
+ is_extendable=is_extendable,
900
+ extension_ranges=extension_ranges,
901
+ file=file_desc,
902
+ serialized_start=None,
903
+ serialized_end=None,
904
+ syntax=syntax,
905
+ # pylint: disable=protected-access
906
+ create_key=descriptor._internal_create_key)
907
+ for nested in desc.nested_types:
908
+ nested.containing_type = desc
909
+ for enum in desc.enum_types:
910
+ enum.containing_type = desc
911
+ for field_index, field_desc in enumerate(desc_proto.field):
912
+ if field_desc.HasField('oneof_index'):
913
+ oneof_index = field_desc.oneof_index
914
+ oneofs[oneof_index].fields.append(fields[field_index])
915
+ fields[field_index].containing_oneof = oneofs[oneof_index]
916
+
917
+ scope[_PrefixWithDot(desc_name)] = desc
918
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
919
+ self._descriptors[desc_name] = desc
920
+ return desc
921
+
922
+ def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
923
+ containing_type=None, scope=None, top_level=False):
924
+ """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.
925
+
926
+ Args:
927
+ enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
928
+ package: Optional package name for the new message EnumDescriptor.
929
+ file_desc: The file containing the enum descriptor.
930
+ containing_type: The type containing this enum.
931
+ scope: Scope containing available types.
932
+ top_level: If True, the enum is a top level symbol. If False, the enum
933
+ is defined inside a message.
934
+
935
+ Returns:
936
+ The added descriptor
937
+ """
938
+
939
+ if package:
940
+ enum_name = '.'.join((package, enum_proto.name))
941
+ else:
942
+ enum_name = enum_proto.name
943
+
944
+ if file_desc is None:
945
+ file_name = None
946
+ else:
947
+ file_name = file_desc.name
948
+
949
+ values = [self._MakeEnumValueDescriptor(value, index)
950
+ for index, value in enumerate(enum_proto.value)]
951
+ desc = descriptor.EnumDescriptor(name=enum_proto.name,
952
+ full_name=enum_name,
953
+ filename=file_name,
954
+ file=file_desc,
955
+ values=values,
956
+ containing_type=containing_type,
957
+ options=_OptionsOrNone(enum_proto),
958
+ # pylint: disable=protected-access
959
+ create_key=descriptor._internal_create_key)
960
+ scope['.%s' % enum_name] = desc
961
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
962
+ self._enum_descriptors[enum_name] = desc
963
+
964
+ # Add top level enum values.
965
+ if top_level:
966
+ for value in values:
967
+ full_name = _NormalizeFullyQualifiedName(
968
+ '.'.join((package, value.name)))
969
+ self._CheckConflictRegister(value, full_name, file_name)
970
+ self._top_enum_values[full_name] = value
971
+
972
+ return desc
973
+
974
+ def _MakeFieldDescriptor(self, field_proto, message_name, index,
975
+ file_desc, is_extension=False):
976
+ """Creates a field descriptor from a FieldDescriptorProto.
977
+
978
+ For message and enum type fields, this method will do a look up
979
+ in the pool for the appropriate descriptor for that type. If it
980
+ is unavailable, it will fall back to the _source function to
981
+ create it. If this type is still unavailable, construction will
982
+ fail.
983
+
984
+ Args:
985
+ field_proto: The proto describing the field.
986
+ message_name: The name of the containing message.
987
+ index: Index of the field
988
+ file_desc: The file containing the field descriptor.
989
+ is_extension: Indication that this field is for an extension.
990
+
991
+ Returns:
992
+ An initialized FieldDescriptor object
993
+ """
994
+
995
+ if message_name:
996
+ full_name = '.'.join((message_name, field_proto.name))
997
+ else:
998
+ full_name = field_proto.name
999
+
1000
+ if field_proto.json_name:
1001
+ json_name = field_proto.json_name
1002
+ else:
1003
+ json_name = None
1004
+
1005
+ return descriptor.FieldDescriptor(
1006
+ name=field_proto.name,
1007
+ full_name=full_name,
1008
+ index=index,
1009
+ number=field_proto.number,
1010
+ type=field_proto.type,
1011
+ cpp_type=None,
1012
+ message_type=None,
1013
+ enum_type=None,
1014
+ containing_type=None,
1015
+ label=field_proto.label,
1016
+ has_default_value=False,
1017
+ default_value=None,
1018
+ is_extension=is_extension,
1019
+ extension_scope=None,
1020
+ options=_OptionsOrNone(field_proto),
1021
+ json_name=json_name,
1022
+ file=file_desc,
1023
+ # pylint: disable=protected-access
1024
+ create_key=descriptor._internal_create_key)
1025
+
1026
+ def _SetAllFieldTypes(self, package, desc_proto, scope):
1027
+ """Sets all the descriptor's fields's types.
1028
+
1029
+ This method also sets the containing types on any extensions.
1030
+
1031
+ Args:
1032
+ package: The current package of desc_proto.
1033
+ desc_proto: The message descriptor to update.
1034
+ scope: Enclosing scope of available types.
1035
+ """
1036
+
1037
+ package = _PrefixWithDot(package)
1038
+
1039
+ main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)
1040
+
1041
+ if package == '.':
1042
+ nested_package = _PrefixWithDot(desc_proto.name)
1043
+ else:
1044
+ nested_package = '.'.join([package, desc_proto.name])
1045
+
1046
+ for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
1047
+ self._SetFieldType(field_proto, field_desc, nested_package, scope)
1048
+
1049
+ for extension_proto, extension_desc in (
1050
+ zip(desc_proto.extension, main_desc.extensions)):
1051
+ extension_desc.containing_type = self._GetTypeFromScope(
1052
+ nested_package, extension_proto.extendee, scope)
1053
+ self._SetFieldType(extension_proto, extension_desc, nested_package, scope)
1054
+
1055
+ for nested_type in desc_proto.nested_type:
1056
+ self._SetAllFieldTypes(nested_package, nested_type, scope)
1057
+
1058
+ def _SetFieldType(self, field_proto, field_desc, package, scope):
1059
+ """Sets the field's type, cpp_type, message_type and enum_type.
1060
+
1061
+ Args:
1062
+ field_proto: Data about the field in proto format.
1063
+ field_desc: The descriptor to modify.
1064
+ package: The package the field's container is in.
1065
+ scope: Enclosing scope of available types.
1066
+ """
1067
+ if field_proto.type_name:
1068
+ desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
1069
+ else:
1070
+ desc = None
1071
+
1072
+ if not field_proto.HasField('type'):
1073
+ if isinstance(desc, descriptor.Descriptor):
1074
+ field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
1075
+ else:
1076
+ field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM
1077
+
1078
+ field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
1079
+ field_proto.type)
1080
+
1081
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
1082
+ or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
1083
+ field_desc.message_type = desc
1084
+
1085
+ if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1086
+ field_desc.enum_type = desc
1087
+
1088
+ if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
1089
+ field_desc.has_default_value = False
1090
+ field_desc.default_value = []
1091
+ elif field_proto.HasField('default_value'):
1092
+ field_desc.has_default_value = True
1093
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1094
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1095
+ field_desc.default_value = float(field_proto.default_value)
1096
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1097
+ field_desc.default_value = field_proto.default_value
1098
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1099
+ field_desc.default_value = field_proto.default_value.lower() == 'true'
1100
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1101
+ field_desc.default_value = field_desc.enum_type.values_by_name[
1102
+ field_proto.default_value].number
1103
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1104
+ field_desc.default_value = text_encoding.CUnescape(
1105
+ field_proto.default_value)
1106
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1107
+ field_desc.default_value = None
1108
+ else:
1109
+ # All other types are of the "int" type.
1110
+ field_desc.default_value = int(field_proto.default_value)
1111
+ else:
1112
+ field_desc.has_default_value = False
1113
+ if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
1114
+ field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
1115
+ field_desc.default_value = 0.0
1116
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
1117
+ field_desc.default_value = u''
1118
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
1119
+ field_desc.default_value = False
1120
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
1121
+ field_desc.default_value = field_desc.enum_type.values[0].number
1122
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
1123
+ field_desc.default_value = b''
1124
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE:
1125
+ field_desc.default_value = None
1126
+ elif field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP:
1127
+ field_desc.default_value = None
1128
+ else:
1129
+ # All other types are of the "int" type.
1130
+ field_desc.default_value = 0
1131
+
1132
+ field_desc.type = field_proto.type
1133
+
1134
+ def _MakeEnumValueDescriptor(self, value_proto, index):
1135
+ """Creates a enum value descriptor object from a enum value proto.
1136
+
1137
+ Args:
1138
+ value_proto: The proto describing the enum value.
1139
+ index: The index of the enum value.
1140
+
1141
+ Returns:
1142
+ An initialized EnumValueDescriptor object.
1143
+ """
1144
+
1145
+ return descriptor.EnumValueDescriptor(
1146
+ name=value_proto.name,
1147
+ index=index,
1148
+ number=value_proto.number,
1149
+ options=_OptionsOrNone(value_proto),
1150
+ type=None,
1151
+ # pylint: disable=protected-access
1152
+ create_key=descriptor._internal_create_key)
1153
+
1154
+ def _MakeServiceDescriptor(self, service_proto, service_index, scope,
1155
+ package, file_desc):
1156
+ """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.
1157
+
1158
+ Args:
1159
+ service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf message.
1160
+ service_index: The index of the service in the File.
1161
+ scope: Dict mapping short and full symbols to message and enum types.
1162
+ package: Optional package name for the new message EnumDescriptor.
1163
+ file_desc: The file containing the service descriptor.
1164
+
1165
+ Returns:
1166
+ The added descriptor.
1167
+ """
1168
+
1169
+ if package:
1170
+ service_name = '.'.join((package, service_proto.name))
1171
+ else:
1172
+ service_name = service_proto.name
1173
+
1174
+ methods = [self._MakeMethodDescriptor(method_proto, service_name, package,
1175
+ scope, index)
1176
+ for index, method_proto in enumerate(service_proto.method)]
1177
+ desc = descriptor.ServiceDescriptor(
1178
+ name=service_proto.name,
1179
+ full_name=service_name,
1180
+ index=service_index,
1181
+ methods=methods,
1182
+ options=_OptionsOrNone(service_proto),
1183
+ file=file_desc,
1184
+ # pylint: disable=protected-access
1185
+ create_key=descriptor._internal_create_key)
1186
+ self._CheckConflictRegister(desc, desc.full_name, desc.file.name)
1187
+ self._service_descriptors[service_name] = desc
1188
+ return desc
1189
+
1190
+ def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
1191
+ index):
1192
+ """Creates a method descriptor from a MethodDescriptorProto.
1193
+
1194
+ Args:
1195
+ method_proto: The proto describing the method.
1196
+ service_name: The name of the containing service.
1197
+ package: Optional package name to look up for types.
1198
+ scope: Scope containing available types.
1199
+ index: Index of the method in the service.
1200
+
1201
+ Returns:
1202
+ An initialized MethodDescriptor object.
1203
+ """
1204
+ full_name = '.'.join((service_name, method_proto.name))
1205
+ input_type = self._GetTypeFromScope(
1206
+ package, method_proto.input_type, scope)
1207
+ output_type = self._GetTypeFromScope(
1208
+ package, method_proto.output_type, scope)
1209
+ return descriptor.MethodDescriptor(
1210
+ name=method_proto.name,
1211
+ full_name=full_name,
1212
+ index=index,
1213
+ containing_service=None,
1214
+ input_type=input_type,
1215
+ output_type=output_type,
1216
+ client_streaming=method_proto.client_streaming,
1217
+ server_streaming=method_proto.server_streaming,
1218
+ options=_OptionsOrNone(method_proto),
1219
+ # pylint: disable=protected-access
1220
+ create_key=descriptor._internal_create_key)
1221
+
1222
+ def _ExtractSymbols(self, descriptors):
1223
+ """Pulls out all the symbols from descriptor protos.
1224
+
1225
+ Args:
1226
+ descriptors: The messages to extract descriptors from.
1227
+ Yields:
1228
+ A two element tuple of the type name and descriptor object.
1229
+ """
1230
+
1231
+ for desc in descriptors:
1232
+ yield (_PrefixWithDot(desc.full_name), desc)
1233
+ for symbol in self._ExtractSymbols(desc.nested_types):
1234
+ yield symbol
1235
+ for enum in desc.enum_types:
1236
+ yield (_PrefixWithDot(enum.full_name), enum)
1237
+
1238
+ def _GetDeps(self, dependencies, visited=None):
1239
+ """Recursively finds dependencies for file protos.
1240
+
1241
+ Args:
1242
+ dependencies: The names of the files being depended on.
1243
+ visited: The names of files already found.
1244
+
1245
+ Yields:
1246
+ Each direct and indirect dependency.
1247
+ """
1248
+
1249
+ visited = visited or set()
1250
+ for dependency in dependencies:
1251
+ if dependency not in visited:
1252
+ visited.add(dependency)
1253
+ dep_desc = self.FindFileByName(dependency)
1254
+ yield dep_desc
1255
+ public_files = [d.name for d in dep_desc.public_dependencies]
1256
+ yield from self._GetDeps(public_files, visited)
1257
+
1258
+ def _GetTypeFromScope(self, package, type_name, scope):
1259
+ """Finds a given type name in the current scope.
1260
+
1261
+ Args:
1262
+ package: The package the proto should be located in.
1263
+ type_name: The name of the type to be found in the scope.
1264
+ scope: Dict mapping short and full symbols to message and enum types.
1265
+
1266
+ Returns:
1267
+ The descriptor for the requested type.
1268
+ """
1269
+ if type_name not in scope:
1270
+ components = _PrefixWithDot(package).split('.')
1271
+ while components:
1272
+ possible_match = '.'.join(components + [type_name])
1273
+ if possible_match in scope:
1274
+ type_name = possible_match
1275
+ break
1276
+ else:
1277
+ components.pop(-1)
1278
+ return scope[type_name]
1279
+
1280
+
1281
+ def _PrefixWithDot(name):
1282
+ return name if name.startswith('.') else '.%s' % name
1283
+
1284
+
1285
+ if _USE_C_DESCRIPTORS:
1286
+ # TODO(amauryfa): This pool could be constructed from Python code, when we
1287
+ # support a flag like 'use_cpp_generated_pool=True'.
1288
+ # pylint: disable=protected-access
1289
+ _DEFAULT = descriptor._message.default_pool
1290
+ else:
1291
+ _DEFAULT = DescriptorPool()
1292
+
1293
+
1294
+ def Default():
1295
+ return _DEFAULT
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/empty_pb2.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Registers the compiled FileDescriptorProto for empty.proto with the default
# descriptor pool; the bytes below are the serialized .proto definition.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypes\x62\x06proto3' if False else b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

# Builds the Python descriptor objects and message classes (Empty) from the
# serialized file and injects them into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: apply serialized options and byte offsets here.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _EMPTY._serialized_start=48
  _EMPTY._serialized_end=55
# @@protoc_insertion_point(module_scope)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/field_mask_pb2.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Registers the compiled FileDescriptorProto for field_mask.proto with the
# default descriptor pool; the bytes below are the serialized .proto file.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')

# Builds the Python descriptor objects and message classes (FieldMask) from
# the serialized file and injects them into this module's namespace.
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors: apply serialized options and byte offsets here.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _FIELDMASK._serialized_start=53
  _FIELDMASK._serialized_end=79
# @@protoc_insertion_point(module_scope)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/message_factory.py ADDED
@@ -0,0 +1,185 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """Provides a factory class for generating dynamic messages.
32
+
33
+ The easiest way to use this class is if you have access to the FileDescriptor
34
+ protos containing the messages you want to create you can just do the following:
35
+
36
+ message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
37
+ my_proto_instance = message_classes['some.proto.package.MessageName']()
38
+ """
39
+
40
+ __author__ = 'matthewtoia@google.com (Matt Toia)'
41
+
42
+ from google.protobuf.internal import api_implementation
43
+ from google.protobuf import descriptor_pool
44
+ from google.protobuf import message
45
+
46
+ if api_implementation.Type() == 'cpp':
47
+ from google.protobuf.pyext import cpp_message as message_impl
48
+ else:
49
+ from google.protobuf.internal import python_message as message_impl
50
+
51
+
52
+ # The type of all Message classes.
53
+ _GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
54
+
55
+
56
class MessageFactory(object):
  """Factory for creating Proto2 messages from descriptors in a pool."""

  def __init__(self, pool=None):
    """Initializes a new factory.

    Args:
      pool: Optional DescriptorPool to resolve descriptors from; a private
        empty pool is created when omitted.
    """
    self.pool = pool or descriptor_pool.DescriptorPool()

    # local cache of all classes built from protobuf descriptors
    self._classes = {}

  def GetPrototype(self, descriptor):
    """Obtains a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    if descriptor not in self._classes:
      result_class = self.CreatePrototype(descriptor)
      # The assignment to _classes is redundant for the base implementation, but
      # might avoid confusion in cases where CreatePrototype gets overridden and
      # does not call the base implementation.
      self._classes[descriptor] = result_class
      return result_class
    return self._classes[descriptor]

  def CreatePrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Don't call this function directly, it always creates a new class. Call
    GetPrototype() instead. This method is meant to be overridden in subclasses
    to perform additional operations on the newly constructed class.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    descriptor_name = descriptor.name
    result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
        descriptor_name,
        (message.Message,),
        {
            'DESCRIPTOR': descriptor,
            # If module not set, it wrongly points to message_factory module.
            '__module__': None,
        })
    result_class._FACTORY = self  # pylint: disable=protected-access
    # Assign in _classes before doing recursive calls to avoid infinite
    # recursion.
    self._classes[descriptor] = result_class
    # Recursively build classes for every message-typed field so the whole
    # type graph is available.
    for field in descriptor.fields:
      if field.message_type:
        self.GetPrototype(field.message_type)
    # Register any nested extensions on the classes they extend.
    for extension in result_class.DESCRIPTOR.extensions:
      if extension.containing_type not in self._classes:
        self.GetPrototype(extension.containing_type)
      extended_class = self._classes[extension.containing_type]
      extended_class.RegisterExtension(extension)
    return result_class

  def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if the descriptor
    pool cannot satisfy them.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will include
      any dependent messages as well as any messages defined in the same file as
      a specified message.
    """
    result = {}
    for file_name in files:
      file_desc = self.pool.FindFileByName(file_name)
      for desc in file_desc.message_types_by_name.values():
        result[desc.full_name] = self.GetPrototype(desc)

      # While the extension FieldDescriptors are created by the descriptor pool,
      # the python classes created in the factory need them to be registered
      # explicitly, which is done below.
      #
      # The call to RegisterExtension will specifically check if the
      # extension was already registered on the object and either
      # ignore the registration if the original was the same, or raise
      # an error if they were different.

      for extension in file_desc.extensions_by_name.values():
        if extension.containing_type not in self._classes:
          self.GetPrototype(extension.containing_type)
        extended_class = self._classes[extension.containing_type]
        extended_class.RegisterExtension(extension)
    return result
158
+
159
+
160
# Module-level singleton factory (with its own private pool) backing the
# GetMessages() convenience function below.
_FACTORY = MessageFactory()
161
+
162
+
163
def GetMessages(file_protos):
  """Builds a dictionary of all the messages available in a set of files.

  Args:
    file_protos: Iterable of FileDescriptorProto to build messages out of.

  Returns:
    A dictionary mapping proto names to the message classes. This will include
    any dependent messages as well as any messages defined in the same file as
    a specified message.
  """
  # Materialize the iterable first: it is consumed twice below (once to index
  # the files by name and once to collect the names to return), so a generator
  # argument would otherwise be silently exhausted after the first pass.
  file_protos = list(file_protos)
  # The cpp implementation of the protocol buffer library requires to add the
  # message in topological order of the dependency graph.
  file_by_name = {file_proto.name: file_proto for file_proto in file_protos}

  def _AddFile(file_proto):
    # Depth-first: add every not-yet-added dependency before the file itself.
    for dependency in file_proto.dependency:
      if dependency in file_by_name:
        # Remove from elements to be visited, in order to cut cycles.
        _AddFile(file_by_name.pop(dependency))
    _FACTORY.pool.Add(file_proto)

  while file_by_name:
    _AddFile(file_by_name.popitem()[1])
  return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/service.py ADDED
@@ -0,0 +1,228 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """DEPRECATED: Declares the RPC service interfaces.
32
+
33
+ This module declares the abstract interfaces underlying proto2 RPC
34
+ services. These are intended to be independent of any particular RPC
35
+ implementation, so that proto2 services can be used on top of a variety
36
+ of implementations. Starting with version 2.3.0, RPC implementations should
37
+ not try to build on these, but should instead provide code generator plugins
38
+ which generate code specific to the particular RPC implementation. This way
39
+ the generated code can be more appropriate for the implementation in use
40
+ and can avoid unnecessary layers of indirection.
41
+ """
42
+
43
+ __author__ = 'petar@google.com (Petar Petrov)'
44
+
45
+
46
class RpcException(Exception):
  """Raised when a blocking RPC method call fails."""
49
+
50
+
51
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Concrete services — both server implementations and stubs — derive from
  this interface. Its methods let callers invoke a service's methods without
  knowing the service's exact type at compile time, much as the Message
  interface does for messages.
  """

  def GetDescriptor():
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Invokes the service method described by method_descriptor.

    When "done" is None the call blocks and the response message is returned
    directly; an RpcException is raised on error. Otherwise the call is
    asynchronous and "done" is invoked later with the response value.

    Preconditions:

    * method_descriptor.service == GetDescriptor
    * request is an instance of the exact class returned by
      GetRequestClass(method).
    * Once the call has started, the request must not be modified.
    * "rpc_controller" is of the type required by the RPC implementation in
      use; for stubs this depends on the RpcChannel the stub wraps.

    Postconditions:

    * "done" is called when the method completes — possibly before
      CallMethod() returns, possibly at some later point.
    * If the RPC failed, the response value passed to "done" is None, and
      further failure details are available from the RpcController.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires requests of a particular Message subclass; this
    method exposes that class so callers can construct instances of it.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    Strictly speaking this is unnecessary — the RpcChannel's CallMethod
    constructs the response message — but it is provided for callers that
    want to know the response type in advance.
    """
    raise NotImplementedError
118
+
119
+
120
class RpcController(object):

  """An RpcController mediates a single method call.

  Its primary purpose is to offer a way to adjust settings specific to the
  RPC implementation and to find out about RPC-level errors. The methods
  here form a least-common-denominator feature set that every implementation
  is expected to support; specific implementations may add more advanced
  features (e.g. deadline propagation).
  """

  # Client-side methods below

  def Reset(self):
    """Restores the RpcController to its initial state.

    Once reset, the controller may be reused for a new call. Must not be
    invoked while an RPC is in progress.
    """
    raise NotImplementedError

  def Failed(self):
    """Returns true if the call failed.

    Only meaningful after the call has finished; the possible failure
    reasons depend on the RPC implementation. When this returns true, the
    contents of the response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """If Failed is true, returns a human-readable description of the error."""
    raise NotImplementedError

  def StartCancel(self):
    """Initiate cancellation.

    Tells the RPC system that the caller would like the call canceled. The
    system may cancel it immediately, cancel it after a delay, or not cancel
    it at all. If the call is canceled, the "done" callback still fires and
    the RpcController reports the call as failed at that time.
    """
    raise NotImplementedError

  # Server-side methods below

  def SetFailed(self, reason):
    """Records a failure reason.

    Makes Failed() return true on the client side; "reason" becomes part of
    the message returned by ErrorText(). Machine-readable failure details
    belong in the response protocol buffer rather than in SetFailed().
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Checks whether the client canceled the RPC.

    When true, the server may as well give up on producing a reply, but it
    must still invoke the final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Registers a callback to invoke on cancellation.

    The callback is always invoked exactly once: when the RPC is canceled,
    after completion if it finishes without being canceled, or immediately
    if the RPC is already canceled when this is called.

    NotifyOnCancel() must be called no more than once per request.
    """
    raise NotImplementedError
202
+
203
class RpcChannel(object):

  """Abstract interface for an RPC channel.

  An RpcChannel is a communication line to a service — possibly running on
  another machine — over which that service's methods can be invoked.
  Normally a channel is not used directly; instead it is wrapped in a stub
  {@link Service}:

  Example:
    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
    RpcController controller = rpcImpl.Controller()
    MyService service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Invokes the remote method identified by the descriptor.

    The signature mirrors Service.CallMethod(), with one requirement
    relaxed: the request object need not be of any specific class as long
    as its descriptor is method.input_type.
    """
    raise NotImplementedError
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/service_reflection.py ADDED
@@ -0,0 +1,295 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """Contains metaclasses used to create protocol service and service stub
32
+ classes from ServiceDescriptor objects at runtime.
33
+
34
+ The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
35
+ inject all useful functionality into the classes output by the protocol
36
+ compiler at compile-time.
37
+ """
38
+
39
+ __author__ = 'petar@google.com (Petar Petrov)'
40
+
41
+
42
class GeneratedServiceType(type):

  """Metaclass that builds service classes from ServiceDescriptors at runtime.

  Implementations of all methods described by the Service interface are
  synthesized and attached here at class-creation time. The protocol
  compiler relies on this metaclass for the service classes it emits, and
  clients may also use it for hand-written classes::

    mydescriptor = ServiceDescriptor(.....)
    class MyProtoService(service.Service):
      __metaclass__ = GeneratedServiceType
      DESCRIPTOR = mydescriptor
    myservice_instance = MyProtoService()
    # ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    # Subclasses of an already-built service class carry no descriptor of
    # their own; there is nothing to generate in that case.
    if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
      return

    service_descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
    _ServiceBuilder(service_descriptor).BuildService(cls)
    cls.DESCRIPTOR = service_descriptor
+
85
+
86
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass that builds service *stub* classes from ServiceDescriptors.

  Shares its responsibilities with GeneratedServiceType, but produces the
  client-side stub classes instead of server-side service classes.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service stub class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary of the class being constructed.
        dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
        describing this protocol service type.
    """
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    # Subclasses of an already-built stub carry no descriptor of their own;
    # nothing to generate in that case.
    if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
      return

    stub_descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
    _ServiceStubBuilder(stub_descriptor).BuildServiceStub(cls)
+
116
+
117
class _ServiceBuilder(object):

  """This class constructs a protocol service class using a service descriptor.

  Given a service descriptor, this class constructs a class that represents
  the specified service descriptor. One service builder instance constructs
  exactly one service class. That means all instances of that class share the
  same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        service class.
    """
    self.descriptor = service_descriptor

  # NOTE: the receiver is deliberately named 'builder' (rather than 'self')
  # so the nested wrapper functions below can close over it while using
  # 'self' for the *service instance* they are eventually bound to.
  def BuildService(builder, cls):
    """Constructs the service class.

    Args:
      cls: The class that will be constructed.
    """

    # CallMethod needs to operate with an instance of the Service class. This
    # internal wrapper function exists only to be able to pass the service
    # instance to the method that does the real CallMethod work.
    # Making sure to use exact argument names from the abstract interface in
    # service.py to match the type signature
    def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done):
      return builder._CallMethod(self, method_descriptor, rpc_controller,
                                 request, done)

    def _WrapGetRequestClass(self, method_descriptor):
      return builder._GetRequestClass(method_descriptor)

    def _WrapGetResponseClass(self, method_descriptor):
      return builder._GetResponseClass(method_descriptor)

    # Inject the Service interface methods onto the class being built.
    builder.cls = cls
    cls.CallMethod = _WrapCallMethod
    cls.GetDescriptor = staticmethod(lambda: builder.descriptor)
    cls.GetDescriptor.__doc__ = 'Returns the service descriptor.'
    cls.GetRequestClass = _WrapGetRequestClass
    cls.GetResponseClass = _WrapGetResponseClass
    # Every declared service method gets a default not-implemented body.
    for method in builder.descriptor.methods:
      setattr(cls, method.name, builder._GenerateNonImplementedMethod(method))

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Calls the method described by a given method descriptor.

    Args:
      srvc: Instance of the service for which this method is called.
      method_descriptor: Descriptor that represent the method to call.
      rpc_controller: RPC controller to use for this method's execution.
      request: Request protocol message.
      callback: A callback to invoke after the method has completed.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    method = getattr(srvc, method_descriptor.name)
    return method(rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Returns the class of the request protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        request protocol message class.

    Returns:
      A class that represents the input protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Returns the class of the response protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        response protocol message class.

    Returns:
      A class that represents the output protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Generates and returns a method that can be set for a service methods.

    Args:
      method: Descriptor of the service method for which a method is to be
        generated.

    Returns:
      A method that can be added to the service class.
    """
    # 'method' is captured per call of this function, so each generated
    # lambda reports its own method name.
    return lambda inst, rpc_controller, request, callback: (
        self._NonImplementedMethod(method.name, rpc_controller, callback))

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """The body of all methods in the generated service class.

    Args:
      method_name: Name of the method being executed.
      rpc_controller: RPC controller used to execute this method.
      callback: A callback which will be invoked when the method finishes.
    """
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)
+ callback(None)
239
+
240
+
241
class _ServiceStubBuilder(object):

  """Constructs a protocol service stub class using a service descriptor.

  Given a service descriptor, this class constructs a suitable stub class.
  A stub is just a type-safe wrapper around an RpcChannel which emulates a
  local implementation of the service.

  One service stub builder instance constructs exactly one class. It means all
  instances of that class share the same service stub builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service stub class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        stub class.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Constructs the stub class.

    Args:
      cls: The class that will be constructed.
    """

    # The stub's constructor only records the channel to forward calls to.
    def _ServiceStubInit(stub, rpc_channel):
      stub.rpc_channel = rpc_channel
    self.cls = cls
    cls.__init__ = _ServiceStubInit
    # Each declared service method becomes a forwarder to the channel.
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateStubMethod(method))

  def _GenerateStubMethod(self, method):
    # 'method' is captured per call of this function, so each generated
    # lambda forwards to its own method descriptor.
    return (lambda inst, rpc_controller, request, callback=None:
            self._StubMethod(inst, method, rpc_controller, request, callback))

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """The body of all service methods in the generated stub class.

    Args:
      stub: Stub instance.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: Rpc controller to execute the method.
      request: Request protocol message.
      callback: A callback to execute when the method finishes.
    Returns:
      Response message (in case of blocking call).
    """
    return stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request,
        method_descriptor.output_type._concrete_class, callback)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/struct_pb2.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/struct.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# Serialized FileDescriptorProto for google/protobuf/struct.proto; registering
# it in the default pool creates the Struct/Value/ListValue descriptors.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypes\x62\x06proto3')

_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
  # Pure-Python descriptors need the serialized options and the byte offsets
  # of each message/enum within the serialized file set by hand.
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
  _STRUCT_FIELDSENTRY._options = None
  _STRUCT_FIELDSENTRY._serialized_options = b'8\001'
  _NULLVALUE._serialized_start=474
  _NULLVALUE._serialized_end=501
  _STRUCT._serialized_start=50
  _STRUCT._serialized_end=182
  _STRUCT_FIELDSENTRY._serialized_start=113
  _STRUCT_FIELDSENTRY._serialized_end=182
  _VALUE._serialized_start=185
  _VALUE._serialized_end=419
  _LISTVALUE._serialized_start=421
  _LISTVALUE._serialized_end=472
# @@protoc_insertion_point(module_scope)
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/symbol_database.py ADDED
@@ -0,0 +1,194 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """A database of Python protocol buffer generated symbols.
32
+
33
+ SymbolDatabase is the MessageFactory for messages generated at compile time,
34
+ and makes it easy to create new instances of a registered type, given only the
35
+ type's protocol buffer symbol name.
36
+
37
+ Example usage::
38
+
39
+ db = symbol_database.SymbolDatabase()
40
+
41
+ # Register symbols of interest, from one or multiple files.
42
+ db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
43
+ db.RegisterMessage(my_proto_pb2.MyMessage)
44
+ db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
45
+
46
+ # The database can be used as a MessageFactory, to generate types based on
47
+ # their name:
48
+ types = db.GetMessages(['my_proto.proto'])
49
+ my_message_instance = types['MyMessage']()
50
+
51
+ # The database's underlying descriptor pool can be queried, so it's not
52
+ # necessary to know a type's filename to be able to generate it:
53
+ filename = db.pool.FindFileContainingSymbol('MyMessage')
54
+ my_message_instance = db.GetMessages([filename])['MyMessage']()
55
+
56
+ # This functionality is also provided directly via a convenience method:
57
+ my_message_instance = db.GetSymbol('MyMessage')()
58
+ """
59
+
60
+
61
+ from google.protobuf.internal import api_implementation
62
+ from google.protobuf import descriptor_pool
63
+ from google.protobuf import message_factory
64
+
65
+
66
class SymbolDatabase(message_factory.MessageFactory):
  """A database of Python generated symbols."""

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Calls to GetSymbol() and GetMessages() will return messages registered here.

    Args:
      message: A :class:`google.protobuf.message.Message` subclass (or
        instance); its descriptor will be registered.

    Returns:
      The provided message.
    """

    desc = message.DESCRIPTOR
    self._classes[desc] = message
    self.RegisterMessageDescriptor(desc)
    return message

  def RegisterMessageDescriptor(self, message_descriptor):
    """Registers the given message descriptor in the local database.

    Args:
      message_descriptor (Descriptor): the message descriptor to add.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      # Only the pure-Python pool needs explicit registration here; the C
      # implementation path skips this step.
      self.pool._AddDescriptor(message_descriptor)

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor (EnumDescriptor): The enum descriptor to register.

    Returns:
      EnumDescriptor: The provided descriptor.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterServiceDescriptor(self, service_descriptor):
    """Registers the given service descriptor in the local database.

    Args:
      service_descriptor (ServiceDescriptor): the service descriptor to
        register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._AddServiceDescriptor(service_descriptor)

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor (FileDescriptor): The file descriptor to register.
    """
    if api_implementation.Type() == 'python':
      # pylint: disable=protected-access
      self.pool._InternalAddFileDescriptor(file_descriptor)

  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances, however, it
    may be extended in future to support other symbol types.

    Args:
      symbol (str): a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """

    return self._classes[self.pool.FindMessageTypeByName(symbol)]

  def GetMessages(self, files):
    # TODO(amauryfa): Fix the differences with MessageFactory.
    """Gets all registered messages from a specified file.

    Only messages already created and registered will be returned; (this is the
    case for imported _pb2 modules)
    But unlike MessageFactory, this version also returns already defined nested
    messages, but does not register any message extensions.

    Args:
      files (list[str]): The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes.

    Raises:
      KeyError: if a file could not be found.
    """

    def _GetAllMessages(desc):
      """Walk a message Descriptor and recursively yields all message names."""
      yield desc
      for msg_desc in desc.nested_types:
        for nested_desc in _GetAllMessages(msg_desc):
          yield nested_desc

    result = {}
    for file_name in files:
      file_desc = self.pool.FindFileByName(file_name)
      for msg_desc in file_desc.message_types_by_name.values():
        for desc in _GetAllMessages(msg_desc):
          try:
            result[desc.full_name] = self._classes[desc]
          except KeyError:
            # This descriptor has no registered class, skip it.
            pass
    return result
187
+
188
+
189
# Module-level singleton shared by every caller of Default(); it wraps the
# process-wide default descriptor pool.
_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/google/protobuf/text_format.py ADDED
@@ -0,0 +1,1795 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Protocol Buffers - Google's data interchange format
2
+ # Copyright 2008 Google Inc. All rights reserved.
3
+ # https://developers.google.com/protocol-buffers/
4
+ #
5
+ # Redistribution and use in source and binary forms, with or without
6
+ # modification, are permitted provided that the following conditions are
7
+ # met:
8
+ #
9
+ # * Redistributions of source code must retain the above copyright
10
+ # notice, this list of conditions and the following disclaimer.
11
+ # * Redistributions in binary form must reproduce the above
12
+ # copyright notice, this list of conditions and the following disclaimer
13
+ # in the documentation and/or other materials provided with the
14
+ # distribution.
15
+ # * Neither the name of Google Inc. nor the names of its
16
+ # contributors may be used to endorse or promote products derived from
17
+ # this software without specific prior written permission.
18
+ #
19
+ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
20
+ # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
21
+ # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
22
+ # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
23
+ # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24
+ # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
25
+ # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26
+ # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27
+ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28
+ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29
+ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30
+
31
+ """Contains routines for printing protocol messages in text format.
32
+
33
+ Simple usage example::
34
+
35
+ # Create a proto object and serialize it to a text proto string.
36
+ message = my_proto_pb2.MyMessage(foo='bar')
37
+ text_proto = text_format.MessageToString(message)
38
+
39
+ # Parse a text proto string.
40
+ message = text_format.Parse(text_proto, my_proto_pb2.MyMessage())
41
+ """
42
+
43
+ __author__ = 'kenton@google.com (Kenton Varda)'
44
+
45
+ # TODO(b/129989314) Import thread contention leads to test failures.
46
+ import encodings.raw_unicode_escape # pylint: disable=unused-import
47
+ import encodings.unicode_escape # pylint: disable=unused-import
48
+ import io
49
+ import math
50
+ import re
51
+
52
+ from google.protobuf.internal import decoder
53
+ from google.protobuf.internal import type_checkers
54
+ from google.protobuf import descriptor
55
+ from google.protobuf import text_encoding
56
+
57
+ # pylint: disable=g-import-not-at-top
58
__all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
           'PrintFieldValue', 'Merge', 'MessageToBytes']

# Value checkers used when parsing integer literals of each proto int width.
_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
                     type_checkers.Int32ValueChecker(),
                     type_checkers.Uint64ValueChecker(),
                     type_checkers.Int64ValueChecker())
# Accepted text-format spellings of special float values, e.g. "inf",
# "-Infinity", "nanf" (the trailing 'f' marks a float literal).
_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
_QUOTES = frozenset(("'", '"'))
_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
69
+
70
+
71
class Error(Exception):
  """Top-level module error for text_format."""


class ParseError(Error):
  """Thrown in case of text parsing or tokenizing error."""

  def __init__(self, message=None, line=None, column=None):
    # When both a message and a location are available, prefix the message
    # with "line[:column] : " so errors read like compiler diagnostics.
    if message is not None and line is not None:
      if column is not None:
        location = '{0}:{1}'.format(line, column)
      else:
        location = str(line)
      message = '{0} : {1}'.format(location, message)
    args = () if message is None else (message,)
    super(ParseError, self).__init__(*args)
    self._line = line
    self._column = column

  def GetLine(self):
    """Returns the 1-based line of the error, or None if unknown."""
    return self._line

  def GetColumn(self):
    """Returns the column of the error, or None if unknown."""
    return self._column
96
+
97
+
98
class TextWriter(object):
  """Thin text buffer the printers write into.

  The ``as_utf8`` flag is accepted for API compatibility only; on Python 3
  output is always accumulated in a unicode ``io.StringIO`` buffer.
  """

  def __init__(self, as_utf8):
    # as_utf8 is intentionally unused here: str is already unicode on
    # Python 3, so no byte-level encoding is needed while buffering.
    self._writer = io.StringIO()

  def write(self, val):
    """Appends *val* to the buffer; returns the number of characters written."""
    return self._writer.write(val)

  def close(self):
    """Closes the underlying buffer."""
    return self._writer.close()

  def getvalue(self):
    """Returns everything written so far as a single string."""
    return self._writer.getvalue()
111
+
112
+
113
def MessageToString(
    message,
    as_utf8=False,
    as_one_line=False,
    use_short_repeated_primitives=False,
    pointy_brackets=False,
    use_index_order=False,
    float_format=None,
    double_format=None,
    use_field_number=False,
    descriptor_pool=None,
    indent=0,
    message_formatter=None,
    print_unknown_fields=False,
    force_colon=False):
  # type: (...) -> str
  """Convert protobuf message to text format.

  Double values can be formatted compactly with 15 digits of precision
  (which is the most that IEEE 754 "double" can guarantee) using
  double_format='.15g'. To ensure that converting to text and back to a
  proto will result in an identical value, double_format='.17g' should be used.

  Args:
    message: The protocol buffers message.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, print fields using source-code order instead of
      field number; extensions come last, ordered by extension number.
    float_format (str): Format spec for float fields (per the "Format
      Specification Mini-Language"); also used for doubles when double_format
      is unset. Defaults to the shortest round-trip representation.
    double_format (str): Format spec for double fields; falls back to
      float_format, then to ``str()``.
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
    indent (int): The initial indent level, in terms of spaces, for pretty
      print.
    message_formatter (function(message, indent, as_one_line) -> unicode|None):
      Custom formatter for selected sub-messages (usually based on message
      type). Use to pretty print parts of the protobuf for easier diffing.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if
      the field is a proto message.

  Returns:
    str: A string of the text formatted protocol buffer message.
  """
  out = TextWriter(as_utf8)
  printer = _Printer(
      out=out,
      indent=indent,
      as_utf8=as_utf8,
      as_one_line=as_one_line,
      use_short_repeated_primitives=use_short_repeated_primitives,
      pointy_brackets=pointy_brackets,
      use_index_order=use_index_order,
      float_format=float_format,
      double_format=double_format,
      use_field_number=use_field_number,
      descriptor_pool=descriptor_pool,
      message_formatter=message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  printer.PrintMessage(message)
  result = out.getvalue()
  out.close()
  # One-line output ends with a trailing field-separator space; drop it.
  return result.rstrip() if as_one_line else result
193
+
194
+
195
def MessageToBytes(message, **kwargs):
  # type: (...) -> bytes
  """Convert protobuf message to encoded text format.  See MessageToString."""
  text = MessageToString(message, **kwargs)
  if isinstance(text, bytes):
    # Already bytes (Python 2 code path); nothing to encode.
    return text
  encoding = 'utf-8' if kwargs.get('as_utf8') else 'ascii'
  return text.encode(encoding)
203
+
204
+
205
def _IsMapEntry(field):
  """Returns True iff *field* is the synthesized entry message of a map field."""
  if field.type != descriptor.FieldDescriptor.TYPE_MESSAGE:
    return False
  message_type = field.message_type
  return message_type.has_options and message_type.GetOptions().map_entry
209
+
210
+
211
def PrintMessage(message,
                 out,
                 indent=0,
                 as_utf8=False,
                 as_one_line=False,
                 use_short_repeated_primitives=False,
                 pointy_brackets=False,
                 use_index_order=False,
                 float_format=None,
                 double_format=None,
                 use_field_number=False,
                 descriptor_pool=None,
                 message_formatter=None,
                 print_unknown_fields=False,
                 force_colon=False):
  """Convert the message to text format and write it to the out stream.

  Args:
    message: The Message object to convert to text format.
    out: A file handle to write the message to.
    indent: The initial indent level for pretty print.
    as_utf8: Return unescaped Unicode for non-ASCII characters.
    as_one_line: Don't introduce newlines between fields.
    use_short_repeated_primitives: Use short repeated format for primitives.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, print fields of a proto message using the order
      defined in source code instead of the field number.
    float_format: If set, use this to specify float field formatting;
      also affects double fields if double_format is not set.
    double_format: If set, use this to specify double field formatting;
      falls back to float_format, then to str().
    use_field_number: If True, print field numbers instead of names.
    descriptor_pool: A DescriptorPool used to resolve Any types.
    message_formatter: A function(message, indent, as_one_line) -> unicode|None
      to custom format selected sub-messages.
    print_unknown_fields: If True, unknown fields will be printed.
    force_colon: If set, a colon will be added after the field name even if
      the field is a proto message.
  """
  printer = _Printer(
      out=out, indent=indent, as_utf8=as_utf8,
      as_one_line=as_one_line,
      use_short_repeated_primitives=use_short_repeated_primitives,
      pointy_brackets=pointy_brackets,
      use_index_order=use_index_order,
      float_format=float_format,
      double_format=double_format,
      use_field_number=use_field_number,
      descriptor_pool=descriptor_pool,
      message_formatter=message_formatter,
      print_unknown_fields=print_unknown_fields,
      force_colon=force_colon)
  printer.PrintMessage(message)
240
+
241
+
242
def PrintField(field,
               value,
               out,
               indent=0,
               as_utf8=False,
               as_one_line=False,
               use_short_repeated_primitives=False,
               pointy_brackets=False,
               use_index_order=False,
               float_format=None,
               double_format=None,
               message_formatter=None,
               print_unknown_fields=False,
               force_colon=False):
  """Print a single field name/value pair."""
  # Build the printer with explicit keywords and delegate the actual
  # formatting work to it.
  printer = _Printer(out=out,
                     indent=indent,
                     as_utf8=as_utf8,
                     as_one_line=as_one_line,
                     use_short_repeated_primitives=use_short_repeated_primitives,
                     pointy_brackets=pointy_brackets,
                     use_index_order=use_index_order,
                     float_format=float_format,
                     double_format=double_format,
                     message_formatter=message_formatter,
                     print_unknown_fields=print_unknown_fields,
                     force_colon=force_colon)
  printer.PrintField(field, value)
264
+
265
+
266
def PrintFieldValue(field,
                    value,
                    out,
                    indent=0,
                    as_utf8=False,
                    as_one_line=False,
                    use_short_repeated_primitives=False,
                    pointy_brackets=False,
                    use_index_order=False,
                    float_format=None,
                    double_format=None,
                    message_formatter=None,
                    print_unknown_fields=False,
                    force_colon=False):
  """Print a single field value (not including name)."""
  # Identical plumbing to PrintField, but delegates to PrintFieldValue so
  # only the value portion is emitted.
  printer = _Printer(out=out,
                     indent=indent,
                     as_utf8=as_utf8,
                     as_one_line=as_one_line,
                     use_short_repeated_primitives=use_short_repeated_primitives,
                     pointy_brackets=pointy_brackets,
                     use_index_order=use_index_order,
                     float_format=float_format,
                     double_format=double_format,
                     message_formatter=message_formatter,
                     print_unknown_fields=print_unknown_fields,
                     force_colon=force_colon)
  printer.PrintFieldValue(field, value)
288
+
289
+
290
def _BuildMessageFromTypeName(type_name, descriptor_pool):
  """Returns a protobuf message instance.

  Args:
    type_name: Fully-qualified protobuf message type name string.
    descriptor_pool: DescriptorPool instance.

  Returns:
    A Message instance of type matching type_name, or None if a Descriptor
    wasn't found matching type_name.
  """
  # pylint: disable=g-import-not-at-top
  # NOTE(review): imports are done lazily here, presumably to avoid an import
  # cycle between text_format and descriptor_pool/symbol_database — confirm.
  if descriptor_pool is None:
    from google.protobuf import descriptor_pool as pool_mod
    descriptor_pool = pool_mod.Default()
  from google.protobuf import symbol_database
  database = symbol_database.Default()
  try:
    message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
  except KeyError:
    # Unknown type: the caller treats None as "cannot expand".
    return None
  message_type = database.GetPrototype(message_descriptor)
  return message_type()
313
+
314
+
315
# These values must match WireType enum in google/protobuf/wire_format.h.
# They are used below when rendering unknown fields, which are only known by
# their wire type.
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
318
+
319
+
320
+ class _Printer(object):
321
+ """Text format printer for protocol message."""
322
+
323
  def __init__(
      self,
      out,
      indent=0,
      as_utf8=False,
      as_one_line=False,
      use_short_repeated_primitives=False,
      pointy_brackets=False,
      use_index_order=False,
      float_format=None,
      double_format=None,
      use_field_number=False,
      descriptor_pool=None,
      message_formatter=None,
      print_unknown_fields=False,
      force_colon=False):
    """Initialize the Printer.

    Double values can be formatted compactly with 15 digits of precision
    (which is the most that IEEE 754 "double" can guarantee) using
    double_format='.15g'. To ensure that converting to text and back to a proto
    will result in an identical value, double_format='.17g' should be used.

    Args:
      out: To record the text format result.
      indent: The initial indent level for pretty print.
      as_utf8: Return unescaped Unicode for non-ASCII characters.
          In Python 3 actual Unicode characters may appear as is in strings.
          In Python 2 the return value will be valid UTF-8 rather than ASCII.
      as_one_line: Don't introduce newlines between fields.
      use_short_repeated_primitives: Use short repeated format for primitives.
      pointy_brackets: If True, use angle brackets instead of curly braces for
        nesting.
      use_index_order: If True, print fields of a proto message using the order
        defined in source code instead of the field number. By default, use the
        field number order.
      float_format: If set, use this to specify float field formatting
        (per the "Format Specification Mini-Language"); otherwise, shortest
        float that has same value in wire will be printed. Also affect double
        field if double_format is not set but float_format is set.
      double_format: If set, use this to specify double field formatting
        (per the "Format Specification Mini-Language"); if it is not set but
        float_format is set, use float_format. Otherwise, str() is used.
      use_field_number: If True, print field numbers instead of names.
      descriptor_pool: A DescriptorPool used to resolve Any types.
      message_formatter: A function(message, indent, as_one_line): unicode|None
        to custom format selected sub-messages (usually based on message type).
        Use to pretty print parts of the protobuf for easier diffing.
      print_unknown_fields: If True, unknown fields will be printed.
      force_colon: If set, a colon will be added after the field name even if
        the field is a proto message.
    """
    self.out = out
    self.indent = indent
    self.as_utf8 = as_utf8
    self.as_one_line = as_one_line
    self.use_short_repeated_primitives = use_short_repeated_primitives
    self.pointy_brackets = pointy_brackets
    self.use_index_order = use_index_order
    self.float_format = float_format
    # Doubles fall back to the float format string when no explicit
    # double_format was supplied (both may be None, meaning str()-style).
    if double_format is not None:
      self.double_format = double_format
    else:
      self.double_format = float_format
    self.use_field_number = use_field_number
    self.descriptor_pool = descriptor_pool
    self.message_formatter = message_formatter
    self.print_unknown_fields = print_unknown_fields
    self.force_colon = force_colon
392
+
393
+ def _TryPrintAsAnyMessage(self, message):
394
+ """Serializes if message is a google.protobuf.Any field."""
395
+ if '/' not in message.type_url:
396
+ return False
397
+ packed_message = _BuildMessageFromTypeName(message.TypeName(),
398
+ self.descriptor_pool)
399
+ if packed_message:
400
+ packed_message.MergeFromString(message.value)
401
+ colon = ':' if self.force_colon else ''
402
+ self.out.write('%s[%s]%s ' % (self.indent * ' ', message.type_url, colon))
403
+ self._PrintMessageFieldValue(packed_message)
404
+ self.out.write(' ' if self.as_one_line else '\n')
405
+ return True
406
+ else:
407
+ return False
408
+
409
+ def _TryCustomFormatMessage(self, message):
410
+ formatted = self.message_formatter(message, self.indent, self.as_one_line)
411
+ if formatted is None:
412
+ return False
413
+
414
+ out = self.out
415
+ out.write(' ' * self.indent)
416
+ out.write(formatted)
417
+ out.write(' ' if self.as_one_line else '\n')
418
+ return True
419
+
420
  def PrintMessage(self, message):
    """Convert protobuf message to text format.

    Args:
      message: The protocol buffers message.
    """
    # A user-supplied formatter gets first chance at the whole message.
    if self.message_formatter and self._TryCustomFormatMessage(message):
      return
    # google.protobuf.Any is special-cased so the packed payload is expanded
    # inline instead of showing raw type_url/value fields.
    if (message.DESCRIPTOR.full_name == _ANY_FULL_TYPE_NAME and
        self._TryPrintAsAnyMessage(message)):
      return
    fields = message.ListFields()
    if self.use_index_order:
      fields.sort(
          key=lambda x: x[0].number if x[0].is_extension else x[0].index)
    for field, value in fields:
      if _IsMapEntry(field):
        # Map entries are printed in sorted key order for determinism.
        for key in sorted(value):
          # This is slow for maps with submessage entries because it copies the
          # entire tree.  Unfortunately this would take significant refactoring
          # of this file to work around.
          #
          # TODO(haberman): refactor and optimize if this becomes an issue.
          entry_submsg = value.GetEntryClass()(key=key, value=value[key])
          self.PrintField(field, entry_submsg)
      elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        # Short form ("field: [1, 2, 3]") applies only to scalar primitives.
        if (self.use_short_repeated_primitives
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE
            and field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_STRING):
          self._PrintShortRepeatedPrimitivesValue(field, value)
        else:
          for element in value:
            self.PrintField(field, element)
      else:
        self.PrintField(field, value)

    if self.print_unknown_fields:
      self._PrintUnknownFields(message.UnknownFields())
458
+
459
+ def _PrintUnknownFields(self, unknown_fields):
460
+ """Print unknown fields."""
461
+ out = self.out
462
+ for field in unknown_fields:
463
+ out.write(' ' * self.indent)
464
+ out.write(str(field.field_number))
465
+ if field.wire_type == WIRETYPE_START_GROUP:
466
+ if self.as_one_line:
467
+ out.write(' { ')
468
+ else:
469
+ out.write(' {\n')
470
+ self.indent += 2
471
+
472
+ self._PrintUnknownFields(field.data)
473
+
474
+ if self.as_one_line:
475
+ out.write('} ')
476
+ else:
477
+ self.indent -= 2
478
+ out.write(' ' * self.indent + '}\n')
479
+ elif field.wire_type == WIRETYPE_LENGTH_DELIMITED:
480
+ try:
481
+ # If this field is parseable as a Message, it is probably
482
+ # an embedded message.
483
+ # pylint: disable=protected-access
484
+ (embedded_unknown_message, pos) = decoder._DecodeUnknownFieldSet(
485
+ memoryview(field.data), 0, len(field.data))
486
+ except Exception: # pylint: disable=broad-except
487
+ pos = 0
488
+
489
+ if pos == len(field.data):
490
+ if self.as_one_line:
491
+ out.write(' { ')
492
+ else:
493
+ out.write(' {\n')
494
+ self.indent += 2
495
+
496
+ self._PrintUnknownFields(embedded_unknown_message)
497
+
498
+ if self.as_one_line:
499
+ out.write('} ')
500
+ else:
501
+ self.indent -= 2
502
+ out.write(' ' * self.indent + '}\n')
503
+ else:
504
+ # A string or bytes field. self.as_utf8 may not work.
505
+ out.write(': \"')
506
+ out.write(text_encoding.CEscape(field.data, False))
507
+ out.write('\" ' if self.as_one_line else '\"\n')
508
+ else:
509
+ # varint, fixed32, fixed64
510
+ out.write(': ')
511
+ out.write(str(field.data))
512
+ out.write(' ' if self.as_one_line else '\n')
513
+
514
+ def _PrintFieldName(self, field):
515
+ """Print field name."""
516
+ out = self.out
517
+ out.write(' ' * self.indent)
518
+ if self.use_field_number:
519
+ out.write(str(field.number))
520
+ else:
521
+ if field.is_extension:
522
+ out.write('[')
523
+ if (field.containing_type.GetOptions().message_set_wire_format and
524
+ field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
525
+ field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
526
+ out.write(field.message_type.full_name)
527
+ else:
528
+ out.write(field.full_name)
529
+ out.write(']')
530
+ elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
531
+ # For groups, use the capitalized name.
532
+ out.write(field.message_type.name)
533
+ else:
534
+ out.write(field.name)
535
+
536
+ if (self.force_colon or
537
+ field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE):
538
+ # The colon is optional in this case, but our cross-language golden files
539
+ # don't include it. Here, the colon is only included if force_colon is
540
+ # set to True
541
+ out.write(':')
542
+
543
+ def PrintField(self, field, value):
544
+ """Print a single field name/value pair."""
545
+ self._PrintFieldName(field)
546
+ self.out.write(' ')
547
+ self.PrintFieldValue(field, value)
548
+ self.out.write(' ' if self.as_one_line else '\n')
549
+
550
+ def _PrintShortRepeatedPrimitivesValue(self, field, value):
551
+ """"Prints short repeated primitives value."""
552
+ # Note: this is called only when value has at least one element.
553
+ self._PrintFieldName(field)
554
+ self.out.write(' [')
555
+ for i in range(len(value) - 1):
556
+ self.PrintFieldValue(field, value[i])
557
+ self.out.write(', ')
558
+ self.PrintFieldValue(field, value[-1])
559
+ self.out.write(']')
560
+ self.out.write(' ' if self.as_one_line else '\n')
561
+
562
+ def _PrintMessageFieldValue(self, value):
563
+ if self.pointy_brackets:
564
+ openb = '<'
565
+ closeb = '>'
566
+ else:
567
+ openb = '{'
568
+ closeb = '}'
569
+
570
+ if self.as_one_line:
571
+ self.out.write('%s ' % openb)
572
+ self.PrintMessage(value)
573
+ self.out.write(closeb)
574
+ else:
575
+ self.out.write('%s\n' % openb)
576
+ self.indent += 2
577
+ self.PrintMessage(value)
578
+ self.indent -= 2
579
+ self.out.write(' ' * self.indent + closeb)
580
+
581
+ def PrintFieldValue(self, field, value):
582
+ """Print a single field value (not including name).
583
+
584
+ For repeated fields, the value should be a single element.
585
+
586
+ Args:
587
+ field: The descriptor of the field to be printed.
588
+ value: The value of the field.
589
+ """
590
+ out = self.out
591
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
592
+ self._PrintMessageFieldValue(value)
593
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
594
+ enum_value = field.enum_type.values_by_number.get(value, None)
595
+ if enum_value is not None:
596
+ out.write(enum_value.name)
597
+ else:
598
+ out.write(str(value))
599
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
600
+ out.write('\"')
601
+ if isinstance(value, str) and not self.as_utf8:
602
+ out_value = value.encode('utf-8')
603
+ else:
604
+ out_value = value
605
+ if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
606
+ # We always need to escape all binary data in TYPE_BYTES fields.
607
+ out_as_utf8 = False
608
+ else:
609
+ out_as_utf8 = self.as_utf8
610
+ out.write(text_encoding.CEscape(out_value, out_as_utf8))
611
+ out.write('\"')
612
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
613
+ if value:
614
+ out.write('true')
615
+ else:
616
+ out.write('false')
617
+ elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
618
+ if self.float_format is not None:
619
+ out.write('{1:{0}}'.format(self.float_format, value))
620
+ else:
621
+ if math.isnan(value):
622
+ out.write(str(value))
623
+ else:
624
+ out.write(str(type_checkers.ToShortestFloat(value)))
625
+ elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_DOUBLE and
626
+ self.double_format is not None):
627
+ out.write('{1:{0}}'.format(self.double_format, value))
628
+ else:
629
+ out.write(str(value))
630
+
631
+
632
+ def Parse(text,
633
+ message,
634
+ allow_unknown_extension=False,
635
+ allow_field_number=False,
636
+ descriptor_pool=None,
637
+ allow_unknown_field=False):
638
+ """Parses a text representation of a protocol message into a message.
639
+
640
+ NOTE: for historical reasons this function does not clear the input
641
+ message. This is different from what the binary msg.ParseFrom(...) does.
642
+ If text contains a field already set in message, the value is appended if the
643
+ field is repeated. Otherwise, an error is raised.
644
+
645
+ Example::
646
+
647
+ a = MyProto()
648
+ a.repeated_field.append('test')
649
+ b = MyProto()
650
+
651
+ # Repeated fields are combined
652
+ text_format.Parse(repr(a), b)
653
+ text_format.Parse(repr(a), b) # repeated_field contains ["test", "test"]
654
+
655
+ # Non-repeated fields cannot be overwritten
656
+ a.singular_field = 1
657
+ b.singular_field = 2
658
+ text_format.Parse(repr(a), b) # ParseError
659
+
660
+ # Binary version:
661
+ b.ParseFromString(a.SerializeToString()) # repeated_field is now "test"
662
+
663
+ Caller is responsible for clearing the message as needed.
664
+
665
+ Args:
666
+ text (str): Message text representation.
667
+ message (Message): A protocol buffer message to merge into.
668
+ allow_unknown_extension: if True, skip over missing extensions and keep
669
+ parsing
670
+ allow_field_number: if True, both field number and field name are allowed.
671
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
672
+ allow_unknown_field: if True, skip over unknown field and keep
673
+ parsing. Avoid to use this option if possible. It may hide some
674
+ errors (e.g. spelling error on field name)
675
+
676
+ Returns:
677
+ Message: The same message passed as argument.
678
+
679
+ Raises:
680
+ ParseError: On text parsing problems.
681
+ """
682
+ return ParseLines(text.split(b'\n' if isinstance(text, bytes) else u'\n'),
683
+ message,
684
+ allow_unknown_extension,
685
+ allow_field_number,
686
+ descriptor_pool=descriptor_pool,
687
+ allow_unknown_field=allow_unknown_field)
688
+
689
+
690
+ def Merge(text,
691
+ message,
692
+ allow_unknown_extension=False,
693
+ allow_field_number=False,
694
+ descriptor_pool=None,
695
+ allow_unknown_field=False):
696
+ """Parses a text representation of a protocol message into a message.
697
+
698
+ Like Parse(), but allows repeated values for a non-repeated field, and uses
699
+ the last one. This means any non-repeated, top-level fields specified in text
700
+ replace those in the message.
701
+
702
+ Args:
703
+ text (str): Message text representation.
704
+ message (Message): A protocol buffer message to merge into.
705
+ allow_unknown_extension: if True, skip over missing extensions and keep
706
+ parsing
707
+ allow_field_number: if True, both field number and field name are allowed.
708
+ descriptor_pool (DescriptorPool): Descriptor pool used to resolve Any types.
709
+ allow_unknown_field: if True, skip over unknown field and keep
710
+ parsing. Avoid to use this option if possible. It may hide some
711
+ errors (e.g. spelling error on field name)
712
+
713
+ Returns:
714
+ Message: The same message passed as argument.
715
+
716
+ Raises:
717
+ ParseError: On text parsing problems.
718
+ """
719
+ return MergeLines(
720
+ text.split(b'\n' if isinstance(text, bytes) else u'\n'),
721
+ message,
722
+ allow_unknown_extension,
723
+ allow_field_number,
724
+ descriptor_pool=descriptor_pool,
725
+ allow_unknown_field=allow_unknown_field)
726
+
727
+
728
+ def ParseLines(lines,
729
+ message,
730
+ allow_unknown_extension=False,
731
+ allow_field_number=False,
732
+ descriptor_pool=None,
733
+ allow_unknown_field=False):
734
+ """Parses a text representation of a protocol message into a message.
735
+
736
+ See Parse() for caveats.
737
+
738
+ Args:
739
+ lines: An iterable of lines of a message's text representation.
740
+ message: A protocol buffer message to merge into.
741
+ allow_unknown_extension: if True, skip over missing extensions and keep
742
+ parsing
743
+ allow_field_number: if True, both field number and field name are allowed.
744
+ descriptor_pool: A DescriptorPool used to resolve Any types.
745
+ allow_unknown_field: if True, skip over unknown field and keep
746
+ parsing. Avoid to use this option if possible. It may hide some
747
+ errors (e.g. spelling error on field name)
748
+
749
+ Returns:
750
+ The same message passed as argument.
751
+
752
+ Raises:
753
+ ParseError: On text parsing problems.
754
+ """
755
+ parser = _Parser(allow_unknown_extension,
756
+ allow_field_number,
757
+ descriptor_pool=descriptor_pool,
758
+ allow_unknown_field=allow_unknown_field)
759
+ return parser.ParseLines(lines, message)
760
+
761
+
762
+ def MergeLines(lines,
763
+ message,
764
+ allow_unknown_extension=False,
765
+ allow_field_number=False,
766
+ descriptor_pool=None,
767
+ allow_unknown_field=False):
768
+ """Parses a text representation of a protocol message into a message.
769
+
770
+ See Merge() for more details.
771
+
772
+ Args:
773
+ lines: An iterable of lines of a message's text representation.
774
+ message: A protocol buffer message to merge into.
775
+ allow_unknown_extension: if True, skip over missing extensions and keep
776
+ parsing
777
+ allow_field_number: if True, both field number and field name are allowed.
778
+ descriptor_pool: A DescriptorPool used to resolve Any types.
779
+ allow_unknown_field: if True, skip over unknown field and keep
780
+ parsing. Avoid to use this option if possible. It may hide some
781
+ errors (e.g. spelling error on field name)
782
+
783
+ Returns:
784
+ The same message passed as argument.
785
+
786
+ Raises:
787
+ ParseError: On text parsing problems.
788
+ """
789
+ parser = _Parser(allow_unknown_extension,
790
+ allow_field_number,
791
+ descriptor_pool=descriptor_pool,
792
+ allow_unknown_field=allow_unknown_field)
793
+ return parser.MergeLines(lines, message)
794
+
795
+
796
+ class _Parser(object):
797
+ """Text format parser for protocol message."""
798
+
799
+ def __init__(self,
800
+ allow_unknown_extension=False,
801
+ allow_field_number=False,
802
+ descriptor_pool=None,
803
+ allow_unknown_field=False):
804
+ self.allow_unknown_extension = allow_unknown_extension
805
+ self.allow_field_number = allow_field_number
806
+ self.descriptor_pool = descriptor_pool
807
+ self.allow_unknown_field = allow_unknown_field
808
+
809
+ def ParseLines(self, lines, message):
810
+ """Parses a text representation of a protocol message into a message."""
811
+ self._allow_multiple_scalars = False
812
+ self._ParseOrMerge(lines, message)
813
+ return message
814
+
815
+ def MergeLines(self, lines, message):
816
+ """Merges a text representation of a protocol message into a message."""
817
+ self._allow_multiple_scalars = True
818
+ self._ParseOrMerge(lines, message)
819
+ return message
820
+
821
+ def _ParseOrMerge(self, lines, message):
822
+ """Converts a text representation of a protocol message into a message.
823
+
824
+ Args:
825
+ lines: Lines of a message's text representation.
826
+ message: A protocol buffer message to merge into.
827
+
828
+ Raises:
829
+ ParseError: On text parsing problems.
830
+ """
831
+ # Tokenize expects native str lines.
832
+ str_lines = (
833
+ line if isinstance(line, str) else line.decode('utf-8')
834
+ for line in lines)
835
+ tokenizer = Tokenizer(str_lines)
836
+ while not tokenizer.AtEnd():
837
+ self._MergeField(tokenizer, message)
838
+
839
+ def _MergeField(self, tokenizer, message):
840
+ """Merges a single protocol message field into a message.
841
+
842
+ Args:
843
+ tokenizer: A tokenizer to parse the field name and values.
844
+ message: A protocol message to record the data.
845
+
846
+ Raises:
847
+ ParseError: In case of text parsing problems.
848
+ """
849
+ message_descriptor = message.DESCRIPTOR
850
+ if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
851
+ tokenizer.TryConsume('[')):
852
+ type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
853
+ tokenizer.Consume(']')
854
+ tokenizer.TryConsume(':')
855
+ if tokenizer.TryConsume('<'):
856
+ expanded_any_end_token = '>'
857
+ else:
858
+ tokenizer.Consume('{')
859
+ expanded_any_end_token = '}'
860
+ expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
861
+ self.descriptor_pool)
862
+ if not expanded_any_sub_message:
863
+ raise ParseError('Type %s not found in descriptor pool' %
864
+ packed_type_name)
865
+ while not tokenizer.TryConsume(expanded_any_end_token):
866
+ if tokenizer.AtEnd():
867
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
868
+ (expanded_any_end_token,))
869
+ self._MergeField(tokenizer, expanded_any_sub_message)
870
+ deterministic = False
871
+
872
+ message.Pack(expanded_any_sub_message,
873
+ type_url_prefix=type_url_prefix,
874
+ deterministic=deterministic)
875
+ return
876
+
877
+ if tokenizer.TryConsume('['):
878
+ name = [tokenizer.ConsumeIdentifier()]
879
+ while tokenizer.TryConsume('.'):
880
+ name.append(tokenizer.ConsumeIdentifier())
881
+ name = '.'.join(name)
882
+
883
+ if not message_descriptor.is_extendable:
884
+ raise tokenizer.ParseErrorPreviousToken(
885
+ 'Message type "%s" does not have extensions.' %
886
+ message_descriptor.full_name)
887
+ # pylint: disable=protected-access
888
+ field = message.Extensions._FindExtensionByName(name)
889
+ # pylint: enable=protected-access
890
+
891
+
892
+ if not field:
893
+ if self.allow_unknown_extension:
894
+ field = None
895
+ else:
896
+ raise tokenizer.ParseErrorPreviousToken(
897
+ 'Extension "%s" not registered. '
898
+ 'Did you import the _pb2 module which defines it? '
899
+ 'If you are trying to place the extension in the MessageSet '
900
+ 'field of another message that is in an Any or MessageSet field, '
901
+ 'that message\'s _pb2 module must be imported as well' % name)
902
+ elif message_descriptor != field.containing_type:
903
+ raise tokenizer.ParseErrorPreviousToken(
904
+ 'Extension "%s" does not extend message type "%s".' %
905
+ (name, message_descriptor.full_name))
906
+
907
+ tokenizer.Consume(']')
908
+
909
+ else:
910
+ name = tokenizer.ConsumeIdentifierOrNumber()
911
+ if self.allow_field_number and name.isdigit():
912
+ number = ParseInteger(name, True, True)
913
+ field = message_descriptor.fields_by_number.get(number, None)
914
+ if not field and message_descriptor.is_extendable:
915
+ field = message.Extensions._FindExtensionByNumber(number)
916
+ else:
917
+ field = message_descriptor.fields_by_name.get(name, None)
918
+
919
+ # Group names are expected to be capitalized as they appear in the
920
+ # .proto file, which actually matches their type names, not their field
921
+ # names.
922
+ if not field:
923
+ field = message_descriptor.fields_by_name.get(name.lower(), None)
924
+ if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
925
+ field = None
926
+
927
+ if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
928
+ field.message_type.name != name):
929
+ field = None
930
+
931
+ if not field and not self.allow_unknown_field:
932
+ raise tokenizer.ParseErrorPreviousToken(
933
+ 'Message type "%s" has no field named "%s".' %
934
+ (message_descriptor.full_name, name))
935
+
936
+ if field:
937
+ if not self._allow_multiple_scalars and field.containing_oneof:
938
+ # Check if there's a different field set in this oneof.
939
+ # Note that we ignore the case if the same field was set before, and we
940
+ # apply _allow_multiple_scalars to non-scalar fields as well.
941
+ which_oneof = message.WhichOneof(field.containing_oneof.name)
942
+ if which_oneof is not None and which_oneof != field.name:
943
+ raise tokenizer.ParseErrorPreviousToken(
944
+ 'Field "%s" is specified along with field "%s", another member '
945
+ 'of oneof "%s" for message type "%s".' %
946
+ (field.name, which_oneof, field.containing_oneof.name,
947
+ message_descriptor.full_name))
948
+
949
+ if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
950
+ tokenizer.TryConsume(':')
951
+ merger = self._MergeMessageField
952
+ else:
953
+ tokenizer.Consume(':')
954
+ merger = self._MergeScalarField
955
+
956
+ if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
957
+ tokenizer.TryConsume('[')):
958
+ # Short repeated format, e.g. "foo: [1, 2, 3]"
959
+ if not tokenizer.TryConsume(']'):
960
+ while True:
961
+ merger(tokenizer, message, field)
962
+ if tokenizer.TryConsume(']'):
963
+ break
964
+ tokenizer.Consume(',')
965
+
966
+ else:
967
+ merger(tokenizer, message, field)
968
+
969
+ else: # Proto field is unknown.
970
+ assert (self.allow_unknown_extension or self.allow_unknown_field)
971
+ _SkipFieldContents(tokenizer)
972
+
973
+ # For historical reasons, fields may optionally be separated by commas or
974
+ # semicolons.
975
+ if not tokenizer.TryConsume(','):
976
+ tokenizer.TryConsume(';')
977
+
978
+
979
+ def _ConsumeAnyTypeUrl(self, tokenizer):
980
+ """Consumes a google.protobuf.Any type URL and returns the type name."""
981
+ # Consume "type.googleapis.com/".
982
+ prefix = [tokenizer.ConsumeIdentifier()]
983
+ tokenizer.Consume('.')
984
+ prefix.append(tokenizer.ConsumeIdentifier())
985
+ tokenizer.Consume('.')
986
+ prefix.append(tokenizer.ConsumeIdentifier())
987
+ tokenizer.Consume('/')
988
+ # Consume the fully-qualified type name.
989
+ name = [tokenizer.ConsumeIdentifier()]
990
+ while tokenizer.TryConsume('.'):
991
+ name.append(tokenizer.ConsumeIdentifier())
992
+ return '.'.join(prefix), '.'.join(name)
993
+
994
+ def _MergeMessageField(self, tokenizer, message, field):
995
+ """Merges a single scalar field into a message.
996
+
997
+ Args:
998
+ tokenizer: A tokenizer to parse the field value.
999
+ message: The message of which field is a member.
1000
+ field: The descriptor of the field to be merged.
1001
+
1002
+ Raises:
1003
+ ParseError: In case of text parsing problems.
1004
+ """
1005
+ is_map_entry = _IsMapEntry(field)
1006
+
1007
+ if tokenizer.TryConsume('<'):
1008
+ end_token = '>'
1009
+ else:
1010
+ tokenizer.Consume('{')
1011
+ end_token = '}'
1012
+
1013
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
1014
+ if field.is_extension:
1015
+ sub_message = message.Extensions[field].add()
1016
+ elif is_map_entry:
1017
+ sub_message = getattr(message, field.name).GetEntryClass()()
1018
+ else:
1019
+ sub_message = getattr(message, field.name).add()
1020
+ else:
1021
+ if field.is_extension:
1022
+ if (not self._allow_multiple_scalars and
1023
+ message.HasExtension(field)):
1024
+ raise tokenizer.ParseErrorPreviousToken(
1025
+ 'Message type "%s" should not have multiple "%s" extensions.' %
1026
+ (message.DESCRIPTOR.full_name, field.full_name))
1027
+ sub_message = message.Extensions[field]
1028
+ else:
1029
+ # Also apply _allow_multiple_scalars to message field.
1030
+ # TODO(jieluo): Change to _allow_singular_overwrites.
1031
+ if (not self._allow_multiple_scalars and
1032
+ message.HasField(field.name)):
1033
+ raise tokenizer.ParseErrorPreviousToken(
1034
+ 'Message type "%s" should not have multiple "%s" fields.' %
1035
+ (message.DESCRIPTOR.full_name, field.name))
1036
+ sub_message = getattr(message, field.name)
1037
+ sub_message.SetInParent()
1038
+
1039
+ while not tokenizer.TryConsume(end_token):
1040
+ if tokenizer.AtEnd():
1041
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token,))
1042
+ self._MergeField(tokenizer, sub_message)
1043
+
1044
+ if is_map_entry:
1045
+ value_cpptype = field.message_type.fields_by_name['value'].cpp_type
1046
+ if value_cpptype == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
1047
+ value = getattr(message, field.name)[sub_message.key]
1048
+ value.CopyFrom(sub_message.value)
1049
+ else:
1050
+ getattr(message, field.name)[sub_message.key] = sub_message.value
1051
+
1052
+ @staticmethod
1053
+ def _IsProto3Syntax(message):
1054
+ message_descriptor = message.DESCRIPTOR
1055
+ return (hasattr(message_descriptor, 'syntax') and
1056
+ message_descriptor.syntax == 'proto3')
1057
+
1058
+ def _MergeScalarField(self, tokenizer, message, field):
1059
+ """Merges a single scalar field into a message.
1060
+
1061
+ Args:
1062
+ tokenizer: A tokenizer to parse the field value.
1063
+ message: A protocol message to record the data.
1064
+ field: The descriptor of the field to be merged.
1065
+
1066
+ Raises:
1067
+ ParseError: In case of text parsing problems.
1068
+ RuntimeError: On runtime errors.
1069
+ """
1070
+ _ = self.allow_unknown_extension
1071
+ value = None
1072
+
1073
+ if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
1074
+ descriptor.FieldDescriptor.TYPE_SINT32,
1075
+ descriptor.FieldDescriptor.TYPE_SFIXED32):
1076
+ value = _ConsumeInt32(tokenizer)
1077
+ elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
1078
+ descriptor.FieldDescriptor.TYPE_SINT64,
1079
+ descriptor.FieldDescriptor.TYPE_SFIXED64):
1080
+ value = _ConsumeInt64(tokenizer)
1081
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
1082
+ descriptor.FieldDescriptor.TYPE_FIXED32):
1083
+ value = _ConsumeUint32(tokenizer)
1084
+ elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
1085
+ descriptor.FieldDescriptor.TYPE_FIXED64):
1086
+ value = _ConsumeUint64(tokenizer)
1087
+ elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
1088
+ descriptor.FieldDescriptor.TYPE_DOUBLE):
1089
+ value = tokenizer.ConsumeFloat()
1090
+ elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
1091
+ value = tokenizer.ConsumeBool()
1092
+ elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
1093
+ value = tokenizer.ConsumeString()
1094
+ elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
1095
+ value = tokenizer.ConsumeByteString()
1096
+ elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
1097
+ value = tokenizer.ConsumeEnum(field)
1098
+ else:
1099
+ raise RuntimeError('Unknown field type %d' % field.type)
1100
+
1101
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
1102
+ if field.is_extension:
1103
+ message.Extensions[field].append(value)
1104
+ else:
1105
+ getattr(message, field.name).append(value)
1106
+ else:
1107
+ if field.is_extension:
1108
+ if (not self._allow_multiple_scalars and
1109
+ not self._IsProto3Syntax(message) and
1110
+ message.HasExtension(field)):
1111
+ raise tokenizer.ParseErrorPreviousToken(
1112
+ 'Message type "%s" should not have multiple "%s" extensions.' %
1113
+ (message.DESCRIPTOR.full_name, field.full_name))
1114
+ else:
1115
+ message.Extensions[field] = value
1116
+ else:
1117
+ duplicate_error = False
1118
+ if not self._allow_multiple_scalars:
1119
+ if self._IsProto3Syntax(message):
1120
+ # Proto3 doesn't represent presence so we try best effort to check
1121
+ # multiple scalars by compare to default values.
1122
+ duplicate_error = bool(getattr(message, field.name))
1123
+ else:
1124
+ duplicate_error = message.HasField(field.name)
1125
+
1126
+ if duplicate_error:
1127
+ raise tokenizer.ParseErrorPreviousToken(
1128
+ 'Message type "%s" should not have multiple "%s" fields.' %
1129
+ (message.DESCRIPTOR.full_name, field.name))
1130
+ else:
1131
+ setattr(message, field.name, value)
1132
+
1133
+
1134
+ def _SkipFieldContents(tokenizer):
1135
+ """Skips over contents (value or message) of a field.
1136
+
1137
+ Args:
1138
+ tokenizer: A tokenizer to parse the field name and values.
1139
+ """
1140
+ # Try to guess the type of this field.
1141
+ # If this field is not a message, there should be a ":" between the
1142
+ # field name and the field value and also the field value should not
1143
+ # start with "{" or "<" which indicates the beginning of a message body.
1144
+ # If there is no ":" or there is a "{" or "<" after ":", this field has
1145
+ # to be a message or the input is ill-formed.
1146
+ if tokenizer.TryConsume(':') and not tokenizer.LookingAt(
1147
+ '{') and not tokenizer.LookingAt('<'):
1148
+ _SkipFieldValue(tokenizer)
1149
+ else:
1150
+ _SkipFieldMessage(tokenizer)
1151
+
1152
+
1153
+ def _SkipField(tokenizer):
1154
+ """Skips over a complete field (name and value/message).
1155
+
1156
+ Args:
1157
+ tokenizer: A tokenizer to parse the field name and values.
1158
+ """
1159
+ if tokenizer.TryConsume('['):
1160
+ # Consume extension name.
1161
+ tokenizer.ConsumeIdentifier()
1162
+ while tokenizer.TryConsume('.'):
1163
+ tokenizer.ConsumeIdentifier()
1164
+ tokenizer.Consume(']')
1165
+ else:
1166
+ tokenizer.ConsumeIdentifierOrNumber()
1167
+
1168
+ _SkipFieldContents(tokenizer)
1169
+
1170
+ # For historical reasons, fields may optionally be separated by commas or
1171
+ # semicolons.
1172
+ if not tokenizer.TryConsume(','):
1173
+ tokenizer.TryConsume(';')
1174
+
1175
+
1176
+ def _SkipFieldMessage(tokenizer):
1177
+ """Skips over a field message.
1178
+
1179
+ Args:
1180
+ tokenizer: A tokenizer to parse the field name and values.
1181
+ """
1182
+
1183
+ if tokenizer.TryConsume('<'):
1184
+ delimiter = '>'
1185
+ else:
1186
+ tokenizer.Consume('{')
1187
+ delimiter = '}'
1188
+
1189
+ while not tokenizer.LookingAt('>') and not tokenizer.LookingAt('}'):
1190
+ _SkipField(tokenizer)
1191
+
1192
+ tokenizer.Consume(delimiter)
1193
+
1194
+
1195
+ def _SkipFieldValue(tokenizer):
1196
+ """Skips over a field value.
1197
+
1198
+ Args:
1199
+ tokenizer: A tokenizer to parse the field name and values.
1200
+
1201
+ Raises:
1202
+ ParseError: In case an invalid field value is found.
1203
+ """
1204
+ # String/bytes tokens can come in multiple adjacent string literals.
1205
+ # If we can consume one, consume as many as we can.
1206
+ if tokenizer.TryConsumeByteString():
1207
+ while tokenizer.TryConsumeByteString():
1208
+ pass
1209
+ return
1210
+
1211
+ if (not tokenizer.TryConsumeIdentifier() and
1212
+ not _TryConsumeInt64(tokenizer) and not _TryConsumeUint64(tokenizer) and
1213
+ not tokenizer.TryConsumeFloat()):
1214
+ raise ParseError('Invalid field value: ' + tokenizer.token)
1215
+
1216
+
1217
+ class Tokenizer(object):
1218
+ """Protocol buffer text representation tokenizer.
1219
+
1220
+ This class handles the lower level string parsing by splitting it into
1221
+ meaningful tokens.
1222
+
1223
+ It was directly ported from the Java protocol buffer API.
1224
+ """
1225
+
1226
+ _WHITESPACE = re.compile(r'\s+')
1227
+ _COMMENT = re.compile(r'(\s*#.*$)', re.MULTILINE)
1228
+ _WHITESPACE_OR_COMMENT = re.compile(r'(\s|(#.*$))+', re.MULTILINE)
1229
+ _TOKEN = re.compile('|'.join([
1230
+ r'[a-zA-Z_][0-9a-zA-Z_+-]*', # an identifier
1231
+ r'([0-9+-]|(\.[0-9]))[0-9a-zA-Z_.+-]*', # a number
1232
+ ] + [ # quoted str for each quote mark
1233
+ # Avoid backtracking! https://stackoverflow.com/a/844267
1234
+ r'{qt}[^{qt}\n\\]*((\\.)+[^{qt}\n\\]*)*({qt}|\\?$)'.format(qt=mark)
1235
+ for mark in _QUOTES
1236
+ ]))
1237
+
1238
+ _IDENTIFIER = re.compile(r'[^\d\W]\w*')
1239
+ _IDENTIFIER_OR_NUMBER = re.compile(r'\w+')
1240
+
1241
+ def __init__(self, lines, skip_comments=True):
1242
+ self._position = 0
1243
+ self._line = -1
1244
+ self._column = 0
1245
+ self._token_start = None
1246
+ self.token = ''
1247
+ self._lines = iter(lines)
1248
+ self._current_line = ''
1249
+ self._previous_line = 0
1250
+ self._previous_column = 0
1251
+ self._more_lines = True
1252
+ self._skip_comments = skip_comments
1253
+ self._whitespace_pattern = (skip_comments and self._WHITESPACE_OR_COMMENT
1254
+ or self._WHITESPACE)
1255
+ self._SkipWhitespace()
1256
+ self.NextToken()
1257
+
1258
+ def LookingAt(self, token):
1259
+ return self.token == token
1260
+
1261
+ def AtEnd(self):
1262
+ """Checks the end of the text was reached.
1263
+
1264
+ Returns:
1265
+ True iff the end was reached.
1266
+ """
1267
+ return not self.token
1268
+
1269
+ def _PopLine(self):
1270
+ while len(self._current_line) <= self._column:
1271
+ try:
1272
+ self._current_line = next(self._lines)
1273
+ except StopIteration:
1274
+ self._current_line = ''
1275
+ self._more_lines = False
1276
+ return
1277
+ else:
1278
+ self._line += 1
1279
+ self._column = 0
1280
+
1281
+ def _SkipWhitespace(self):
1282
+ while True:
1283
+ self._PopLine()
1284
+ match = self._whitespace_pattern.match(self._current_line, self._column)
1285
+ if not match:
1286
+ break
1287
+ length = len(match.group(0))
1288
+ self._column += length
1289
+
1290
+ def TryConsume(self, token):
1291
+ """Tries to consume a given piece of text.
1292
+
1293
+ Args:
1294
+ token: Text to consume.
1295
+
1296
+ Returns:
1297
+ True iff the text was consumed.
1298
+ """
1299
+ if self.token == token:
1300
+ self.NextToken()
1301
+ return True
1302
+ return False
1303
+
1304
+ def Consume(self, token):
1305
+ """Consumes a piece of text.
1306
+
1307
+ Args:
1308
+ token: Text to consume.
1309
+
1310
+ Raises:
1311
+ ParseError: If the text couldn't be consumed.
1312
+ """
1313
+ if not self.TryConsume(token):
1314
+ raise self.ParseError('Expected "%s".' % token)
1315
+
1316
+ def ConsumeComment(self):
1317
+ result = self.token
1318
+ if not self._COMMENT.match(result):
1319
+ raise self.ParseError('Expected comment.')
1320
+ self.NextToken()
1321
+ return result
1322
+
1323
+ def ConsumeCommentOrTrailingComment(self):
1324
+ """Consumes a comment, returns a 2-tuple (trailing bool, comment str)."""
1325
+
1326
+ # Tokenizer initializes _previous_line and _previous_column to 0. As the
1327
+ # tokenizer starts, it looks like there is a previous token on the line.
1328
+ just_started = self._line == 0 and self._column == 0
1329
+
1330
+ before_parsing = self._previous_line
1331
+ comment = self.ConsumeComment()
1332
+
1333
+ # A trailing comment is a comment on the same line than the previous token.
1334
+ trailing = (self._previous_line == before_parsing
1335
+ and not just_started)
1336
+
1337
+ return trailing, comment
1338
+
1339
+ def TryConsumeIdentifier(self):
1340
+ try:
1341
+ self.ConsumeIdentifier()
1342
+ return True
1343
+ except ParseError:
1344
+ return False
1345
+
1346
+ def ConsumeIdentifier(self):
1347
+ """Consumes protocol message field identifier.
1348
+
1349
+ Returns:
1350
+ Identifier string.
1351
+
1352
+ Raises:
1353
+ ParseError: If an identifier couldn't be consumed.
1354
+ """
1355
+ result = self.token
1356
+ if not self._IDENTIFIER.match(result):
1357
+ raise self.ParseError('Expected identifier.')
1358
+ self.NextToken()
1359
+ return result
1360
+
1361
+ def TryConsumeIdentifierOrNumber(self):
1362
+ try:
1363
+ self.ConsumeIdentifierOrNumber()
1364
+ return True
1365
+ except ParseError:
1366
+ return False
1367
+
1368
+ def ConsumeIdentifierOrNumber(self):
1369
+ """Consumes protocol message field identifier.
1370
+
1371
+ Returns:
1372
+ Identifier string.
1373
+
1374
+ Raises:
1375
+ ParseError: If an identifier couldn't be consumed.
1376
+ """
1377
+ result = self.token
1378
+ if not self._IDENTIFIER_OR_NUMBER.match(result):
1379
+ raise self.ParseError('Expected identifier or number, got %s.' % result)
1380
+ self.NextToken()
1381
+ return result
1382
+
1383
+ def TryConsumeInteger(self):
1384
+ try:
1385
+ self.ConsumeInteger()
1386
+ return True
1387
+ except ParseError:
1388
+ return False
1389
+
1390
+ def ConsumeInteger(self):
1391
+ """Consumes an integer number.
1392
+
1393
+ Returns:
1394
+ The integer parsed.
1395
+
1396
+ Raises:
1397
+ ParseError: If an integer couldn't be consumed.
1398
+ """
1399
+ try:
1400
+ result = _ParseAbstractInteger(self.token)
1401
+ except ValueError as e:
1402
+ raise self.ParseError(str(e))
1403
+ self.NextToken()
1404
+ return result
1405
+
1406
+ def TryConsumeFloat(self):
1407
+ try:
1408
+ self.ConsumeFloat()
1409
+ return True
1410
+ except ParseError:
1411
+ return False
1412
+
1413
+ def ConsumeFloat(self):
1414
+ """Consumes an floating point number.
1415
+
1416
+ Returns:
1417
+ The number parsed.
1418
+
1419
+ Raises:
1420
+ ParseError: If a floating point number couldn't be consumed.
1421
+ """
1422
+ try:
1423
+ result = ParseFloat(self.token)
1424
+ except ValueError as e:
1425
+ raise self.ParseError(str(e))
1426
+ self.NextToken()
1427
+ return result
1428
+
1429
+ def ConsumeBool(self):
1430
+ """Consumes a boolean value.
1431
+
1432
+ Returns:
1433
+ The bool parsed.
1434
+
1435
+ Raises:
1436
+ ParseError: If a boolean value couldn't be consumed.
1437
+ """
1438
+ try:
1439
+ result = ParseBool(self.token)
1440
+ except ValueError as e:
1441
+ raise self.ParseError(str(e))
1442
+ self.NextToken()
1443
+ return result
1444
+
1445
+ def TryConsumeByteString(self):
1446
+ try:
1447
+ self.ConsumeByteString()
1448
+ return True
1449
+ except ParseError:
1450
+ return False
1451
+
1452
+ def ConsumeString(self):
1453
+ """Consumes a string value.
1454
+
1455
+ Returns:
1456
+ The string parsed.
1457
+
1458
+ Raises:
1459
+ ParseError: If a string value couldn't be consumed.
1460
+ """
1461
+ the_bytes = self.ConsumeByteString()
1462
+ try:
1463
+ return str(the_bytes, 'utf-8')
1464
+ except UnicodeDecodeError as e:
1465
+ raise self._StringParseError(e)
1466
+
1467
+ def ConsumeByteString(self):
1468
+ """Consumes a byte array value.
1469
+
1470
+ Returns:
1471
+ The array parsed (as a string).
1472
+
1473
+ Raises:
1474
+ ParseError: If a byte array value couldn't be consumed.
1475
+ """
1476
+ the_list = [self._ConsumeSingleByteString()]
1477
+ while self.token and self.token[0] in _QUOTES:
1478
+ the_list.append(self._ConsumeSingleByteString())
1479
+ return b''.join(the_list)
1480
+
1481
+ def _ConsumeSingleByteString(self):
1482
+ """Consume one token of a string literal.
1483
+
1484
+ String literals (whether bytes or text) can come in multiple adjacent
1485
+ tokens which are automatically concatenated, like in C or Python. This
1486
+ method only consumes one token.
1487
+
1488
+ Returns:
1489
+ The token parsed.
1490
+ Raises:
1491
+ ParseError: When the wrong format data is found.
1492
+ """
1493
+ text = self.token
1494
+ if len(text) < 1 or text[0] not in _QUOTES:
1495
+ raise self.ParseError('Expected string but found: %r' % (text,))
1496
+
1497
+ if len(text) < 2 or text[-1] != text[0]:
1498
+ raise self.ParseError('String missing ending quote: %r' % (text,))
1499
+
1500
+ try:
1501
+ result = text_encoding.CUnescape(text[1:-1])
1502
+ except ValueError as e:
1503
+ raise self.ParseError(str(e))
1504
+ self.NextToken()
1505
+ return result
1506
+
1507
+ def ConsumeEnum(self, field):
1508
+ try:
1509
+ result = ParseEnum(field, self.token)
1510
+ except ValueError as e:
1511
+ raise self.ParseError(str(e))
1512
+ self.NextToken()
1513
+ return result
1514
+
1515
+ def ParseErrorPreviousToken(self, message):
1516
+ """Creates and *returns* a ParseError for the previously read token.
1517
+
1518
+ Args:
1519
+ message: A message to set for the exception.
1520
+
1521
+ Returns:
1522
+ A ParseError instance.
1523
+ """
1524
+ return ParseError(message, self._previous_line + 1,
1525
+ self._previous_column + 1)
1526
+
1527
+ def ParseError(self, message):
1528
+ """Creates and *returns* a ParseError for the current token."""
1529
+ return ParseError('\'' + self._current_line + '\': ' + message,
1530
+ self._line + 1, self._column + 1)
1531
+
1532
  def _StringParseError(self, e):
    """Wraps a string-decoding error into a ParseError at the current token."""
    return self.ParseError('Couldn\'t parse string: ' + str(e))
1534
+
1535
  def NextToken(self):
    """Reads the next meaningful token."""
    # Remember where the token we are leaving started, so error reporting
    # (ParseErrorPreviousToken) can point at it.
    self._previous_line = self._line
    self._previous_column = self._column

    # Step over the current token, then past any whitespace/newlines.
    self._column += len(self.token)
    self._SkipWhitespace()

    # No more input: the empty token acts as the end-of-stream sentinel.
    if not self._more_lines:
      self.token = ''
      return

    match = self._TOKEN.match(self._current_line, self._column)
    if not match and not self._skip_comments:
      # When comments are being kept, a comment is itself a valid token.
      match = self._COMMENT.match(self._current_line, self._column)
    if match:
      token = match.group(0)
      self.token = token
    else:
      # Fall back to a single character (e.g. punctuation).
      self.token = self._current_line[self._column]
1555
+
1556
# Aliased so it can still be accessed by current visibility violators
# (external code that reaches for the old private name).
# TODO(dbarnett): Migrate violators to textformat_tokenizer.
_Tokenizer = Tokenizer  # pylint: disable=invalid-name
1559
+
1560
+
1561
def _ConsumeInt32(tokenizer):
  """Consumes a signed 32bit integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If a signed 32bit integer couldn't be consumed.
  """
  # is_signed=True, is_long=False selects the int32 range checker.
  return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
1574
+
1575
+
1576
def _ConsumeUint32(tokenizer):
  """Consumes an unsigned 32bit integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If an unsigned 32bit integer couldn't be consumed.
  """
  # is_signed=False, is_long=False selects the uint32 range checker.
  return _ConsumeInteger(tokenizer, is_signed=False, is_long=False)
1589
+
1590
+
1591
def _TryConsumeInt64(tokenizer):
  """Returns True (and consumes the token) iff it is a signed 64bit int."""
  try:
    _ConsumeInt64(tokenizer)
  except ParseError:
    return False
  return True
1597
+
1598
+
1599
def _ConsumeInt64(tokenizer):
  """Consumes a signed 64bit integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If a signed 64bit integer couldn't be consumed.
  """
  # is_signed=True, is_long=True selects the int64 range checker.
  return _ConsumeInteger(tokenizer, is_signed=True, is_long=True)
1612
+
1613
+
1614
def _TryConsumeUint64(tokenizer):
  """Returns True (and consumes the token) iff it is an unsigned 64bit int."""
  try:
    _ConsumeUint64(tokenizer)
  except ParseError:
    return False
  return True
1620
+
1621
+
1622
def _ConsumeUint64(tokenizer):
  """Consumes an unsigned 64bit integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If an unsigned 64bit integer couldn't be consumed.
  """
  # is_signed=False, is_long=True selects the uint64 range checker.
  return _ConsumeInteger(tokenizer, is_signed=False, is_long=True)
1635
+
1636
+
1637
def _ConsumeInteger(tokenizer, is_signed=False, is_long=False):
  """Consumes an integer number from tokenizer.

  Args:
    tokenizer: A tokenizer used to parse the number.
    is_signed: True if a signed integer must be parsed.
    is_long: True if a long integer must be parsed.

  Returns:
    The integer parsed.

  Raises:
    ParseError: If an integer with given characteristics couldn't be consumed.
  """
  try:
    value = ParseInteger(tokenizer.token, is_signed=is_signed, is_long=is_long)
  except ValueError as err:
    raise tokenizer.ParseError(str(err))
  tokenizer.NextToken()
  return value
1657
+
1658
+
1659
def ParseInteger(text, is_signed=False, is_long=False):
  """Parses an integer and range-checks it.

  Args:
    text: The text to parse.
    is_signed: True if a signed integer must be parsed.
    is_long: True if a long (64bit) integer must be parsed.

  Returns:
    The integer value.

  Raises:
    ValueError: Thrown iff the text is not a valid integer.
  """
  # Parse first; exceptions propagate to the caller.
  value = _ParseAbstractInteger(text)

  # Pick the matching range checker: index encodes (is_long, is_signed).
  checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
  checker.CheckValue(value)
  return value
1680
+
1681
+
1682
+ def _ParseAbstractInteger(text):
1683
+ """Parses an integer without checking size/signedness.
1684
+
1685
+ Args:
1686
+ text: The text to parse.
1687
+
1688
+ Returns:
1689
+ The integer value.
1690
+
1691
+ Raises:
1692
+ ValueError: Thrown Iff the text is not a valid integer.
1693
+ """
1694
+ # Do the actual parsing. Exception handling is propagated to caller.
1695
+ orig_text = text
1696
+ c_octal_match = re.match(r'(-?)0(\d+)$', text)
1697
+ if c_octal_match:
1698
+ # Python 3 no longer supports 0755 octal syntax without the 'o', so
1699
+ # we always use the '0o' prefix for multi-digit numbers starting with 0.
1700
+ text = c_octal_match.group(1) + '0o' + c_octal_match.group(2)
1701
+ try:
1702
+ return int(text, 0)
1703
+ except ValueError:
1704
+ raise ValueError('Couldn\'t parse integer: %s' % orig_text)
1705
+
1706
+
1707
def ParseFloat(text):
  """Parse a floating point number.

  Args:
    text: Text to parse.

  Returns:
    The number parsed.

  Raises:
    ValueError: If a floating point number couldn't be parsed.
  """
  try:
    # Python-compatible syntax covers the common case.
    return float(text)
  except ValueError:
    # Check the protobuf-specific spellings that float() rejects.
    if _FLOAT_INFINITY.match(text):
      return float('-inf') if text[0] == '-' else float('inf')
    if _FLOAT_NAN.match(text):
      return float('nan')
    # assume '1.0f' format
    try:
      return float(text.rstrip('f'))
    except ValueError:
      raise ValueError('Couldn\'t parse float: %s' % text)
1737
+
1738
+
1739
def ParseBool(text):
  """Parse a boolean value.

  Args:
    text: Text to parse.

  Returns:
    Boolean values parsed

  Raises:
    ValueError: If text is not a valid boolean.
  """
  if text in {'true', 't', '1', 'True'}:
    return True
  if text in {'false', 'f', '0', 'False'}:
    return False
  raise ValueError('Expected "true" or "false".')
1757
+
1758
+
1759
def ParseEnum(field, value):
  """Parse an enum value.

  The value can be specified by a number (the enum value), or by
  a string literal (the enum name).

  Args:
    field: Enum field descriptor.
    value: String value.

  Returns:
    Enum value number.

  Raises:
    ValueError: If the enum value could not be parsed.
  """
  enum_descriptor = field.enum_type
  try:
    # base=0 lets int() accept decimal, hex and octal spellings.
    number = int(value, 0)
  except ValueError:
    # Identifier: look the name up in the enum descriptor.
    enum_value = enum_descriptor.values_by_name.get(value, None)
    if enum_value is None:
      raise ValueError('Enum type "%s" has no value named %s.' %
                       (enum_descriptor.full_name, value))
  else:
    # Numeric value.
    if hasattr(field.file, 'syntax'):
      # Attribute is checked for compatibility.
      if field.file.syntax == 'proto3':
        # Proto3 accept numeric unknown enums.
        return number
    enum_value = enum_descriptor.values_by_number.get(number, None)
    if enum_value is None:
      raise ValueError('Enum type "%s" has no value with number %d.' %
                       (enum_descriptor.full_name, number))
  return enum_value.number
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/__init__.cpython-38.pyc ADDED
Binary file (430 Bytes). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/extensions.cpython-38.pyc ADDED
Binary file (20.3 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/config/__pycache__/plugins.cpython-38.pyc ADDED
Binary file (11.1 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/fetching.cpython-38.pyc ADDED
Binary file (6.86 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/findlib.cpython-38.pyc ADDED
Binary file (4.19 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/format.cpython-38.pyc ADDED
Binary file (28.9 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/imopen.cpython-38.pyc ADDED
Binary file (7.27 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/util.cpython-38.pyc ADDED
Binary file (16.3 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/__pycache__/v3_plugin_api.cpython-38.pyc ADDED
Binary file (16.8 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/fetching.py ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # Based on code from the vispy project
3
+ # Distributed under the (new) BSD License. See LICENSE.txt for more info.
4
+
5
+ """Data downloading and reading functions
6
+ """
7
+
8
+ from math import log
9
+ import os
10
+ from os import path as op
11
+ import sys
12
+ import shutil
13
+ import time
14
+
15
+ from . import appdata_dir, resource_dirs
16
+ from . import StdoutProgressIndicator, urlopen
17
+
18
+
19
class InternetNotAllowedError(IOError):
    """Raised when downloading is disabled (IMAGEIO_NO_INTERNET).

    Plugins that need resources can just use get_remote_file(), but
    should catch this error and silently ignore it.
    """
25
+
26
+
27
class NeedDownloadError(IOError):
    """Raised when a remote file is requested that is not locally
    available, and which must be explicitly downloaded by the user
    (i.e. get_remote_file() was called with auto=False).
    """
31
+
32
+
33
def _promote_to_directory(filename, nfname, given_directory, found_dir):
    """Return the path a locally-found cached file should be used from.

    If the caller asked for a specific *given_directory* and the file was
    found in a different *found_dir*, copy it into *given_directory*
    (creating it if needed) and return the copy's path; otherwise return
    *filename* unchanged.
    """
    if given_directory and given_directory != found_dir:
        filename2 = os.path.join(given_directory, nfname)
        # Make sure the output directory exists
        if not op.isdir(op.dirname(filename2)):
            os.makedirs(op.abspath(op.dirname(filename2)))
        shutil.copy(filename, filename2)
        return filename2
    return filename


def get_remote_file(fname, directory=None, force_download=False, auto=True):
    """Get a the filename for the local version of a file from the web

    Parameters
    ----------
    fname : str
        The relative filename on the remote data repository to download.
        These correspond to paths on
        ``https://github.com/imageio/imageio-binaries/``.
    directory : str | None
        The directory where the file will be cached if a download was
        required to obtain the file. By default, the appdata directory
        is used. This is also the first directory that is checked for
        a local version of the file. If the directory does not exist,
        it will be created.
    force_download : bool | str
        If True, the file will be downloaded even if a local copy exists
        (and this copy will be overwritten). Can also be a YYYY-MM-DD date
        to ensure a file is up-to-date (modified date of a file on disk,
        if present, is checked).
    auto : bool
        Whether to auto-download the file if its not present locally. Default
        True. If False and a download is needed, raises NeedDownloadError.

    Returns
    -------
    fname : str
        The path to the file on the local system.

    Raises
    ------
    InternetNotAllowedError
        If a download is needed but IMAGEIO_NO_INTERNET is set.
    NeedDownloadError
        If a download is needed but ``auto`` is False.
    """
    _url_root = "https://github.com/imageio/imageio-binaries/raw/master/"
    url = _url_root + fname
    nfname = op.normcase(fname)  # convert to native
    # Get dirs to look for the resource
    given_directory = directory
    directory = given_directory or appdata_dir("imageio")
    dirs = resource_dirs()
    dirs.insert(0, directory)  # Given dir has preference
    # Try to find the resource locally
    for dir_ in dirs:  # 'dir_', not 'dir': avoid shadowing the builtin
        filename = op.join(dir_, nfname)
        if not op.isfile(filename):
            continue
        if not force_download:  # we're done
            return _promote_to_directory(filename, nfname, given_directory, dir_)
        if isinstance(force_download, str):
            # force_download is a YYYY-MM-DD freshness date: keep the
            # local file only if it is at least that recent.
            ntime = time.strptime(force_download, "%Y-%m-%d")
            ftime = time.gmtime(op.getctime(filename))
            if ftime >= ntime:
                return _promote_to_directory(
                    filename, nfname, given_directory, dir_
                )
            print("File older than %s, updating..." % force_download)
            break

    # If we get here, we're going to try to download the file
    if os.getenv("IMAGEIO_NO_INTERNET", "").lower() in ("1", "true", "yes"):
        raise InternetNotAllowedError(
            "Will not download resource from the "
            "internet because environment variable "
            "IMAGEIO_NO_INTERNET is set."
        )

    # Can we proceed with auto-download?
    if not auto:
        raise NeedDownloadError()

    # Get filename to store to and make sure the dir exists
    filename = op.join(directory, nfname)
    if not op.isdir(op.dirname(filename)):
        os.makedirs(op.abspath(op.dirname(filename)))
    # let's go get the file
    if os.getenv("CONTINUOUS_INTEGRATION", False):  # pragma: no cover
        # On CI, we retry a few times ...
        for i in range(2):
            try:
                _fetch_file(url, filename)
                return filename
            except IOError:
                time.sleep(0.5)
        else:
            _fetch_file(url, filename)
            return filename
    else:  # pragma: no cover
        _fetch_file(url, filename)
        return filename
130
+
131
+
132
def _fetch_file(url, file_name, print_destination=True):
    """Load requested file, downloading it if needed or requested

    Downloads to ``file_name + '.part'`` first and moves the temp file
    into place only after a complete download, so a partial download
    never masquerades as the real file.

    Parameters
    ----------
    url: string
        The url of file to be downloaded.
    file_name: string
        Name, along with the path, of where downloaded file will be saved.
    print_destination: bool, optional
        If true, destination of where file was saved will be printed after
        download finishes.

    Raises
    ------
    IOError
        If all 4 download attempts fail.
    """
    # Adapted from NISL:
    # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py

    print(
        "Imageio: %r was not found on your computer; "
        "downloading it now." % os.path.basename(file_name)
    )

    temp_file_name = file_name + ".part"
    local_file = None
    initial_size = 0
    errors = []
    # Up to 4 attempts; the for/else below raises only if no attempt breaks.
    for tries in range(4):
        try:
            # Checking file size and displaying it alongside the download url
            remote_file = urlopen(url, timeout=5.0)
            file_size = int(remote_file.headers["Content-Length"].strip())
            size_str = _sizeof_fmt(file_size)
            print("Try %i. Download from %s (%s)" % (tries + 1, url, size_str))
            # Downloading data (can be extended to resume if need be)
            local_file = open(temp_file_name, "wb")
            _chunk_read(remote_file, local_file, initial_size=initial_size)
            # temp file must be closed prior to the move
            if not local_file.closed:
                local_file.close()
            shutil.move(temp_file_name, file_name)
            if print_destination is True:
                sys.stdout.write("File saved as %s.\n" % file_name)
            break
        except Exception as e:
            # Best-effort retry loop: remember the error and try again.
            errors.append(e)
            print("Error while fetching file: %s." % str(e))
        finally:
            if local_file is not None:
                if not local_file.closed:
                    local_file.close()
    else:
        raise IOError(
            "Unable to download %r. Perhaps there is no internet "
            "connection? If there is, please report this problem."
            % os.path.basename(file_name)
        )
189
+
190
+
191
def _chunk_read(response, local_file, chunk_size=8192, initial_size=0):
    """Download a file chunk by chunk and show advancement

    Can also be used when resuming downloads over http.

    Parameters
    ----------
    response: urllib.response.addinfourl
        Response to the download request in order to get file size.
    local_file: file
        Hard disk file where data should be written.
    chunk_size: integer, optional
        Size of downloaded chunks. Default: 8192
    initial_size: int, optional
        If resuming, indicate the initial size of the file.
    """
    # Adapted from NISL:
    # https://github.com/nisl/tutorial/blob/master/nisl/datasets.py

    bytes_so_far = initial_size
    # Returns only amount left to download when resuming, not the size of the
    # entire file
    total_size = int(response.headers["Content-Length"].strip())
    total_size += initial_size

    progress = StdoutProgressIndicator("Downloading")
    progress.start("", "bytes", total_size)

    while True:
        chunk = response.read(chunk_size)
        # NOTE(review): bytes_so_far is accumulated but not otherwise used
        # here; the progress bar itself is advanced inside _chunk_write.
        bytes_so_far += len(chunk)
        if not chunk:
            break
        _chunk_write(chunk, local_file, progress)
    progress.finish("Done")
226
+
227
+
228
+ def _chunk_write(chunk, local_file, progress):
229
+ """Write a chunk to file and update the progress bar"""
230
+ local_file.write(chunk)
231
+ progress.increase_progress(len(chunk))
232
+ time.sleep(0) # Give other threads a chance, e.g. those that handle stdout pipes
233
+
234
+
235
+ def _sizeof_fmt(num):
236
+ """Turn number of bytes into human-readable str"""
237
+ units = ["bytes", "kB", "MB", "GB", "TB", "PB"]
238
+ decimals = [0, 0, 1, 2, 2, 2]
239
+ """Human friendly file size"""
240
+ if num > 1:
241
+ exponent = min(int(log(num, 1024)), len(units) - 1)
242
+ quotient = float(num) / 1024**exponent
243
+ unit = units[exponent]
244
+ num_decimals = decimals[exponent]
245
+ format_string = "{0:.%sf} {1}" % num_decimals
246
+ return format_string.format(quotient, unit)
247
+ return "0 bytes" if num == 0 else "1 byte"
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/core/util.py ADDED
@@ -0,0 +1,559 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ # imageio is distributed under the terms of the (new) BSD License.
3
+
4
+ """
5
+ Various utilities for imageio
6
+ """
7
+
8
+
9
+ from collections import OrderedDict
10
+ import numpy as np
11
+ import os
12
+ import re
13
+ import struct
14
+ import sys
15
+ import time
16
+ import logging
17
+
18
+
19
+ logger = logging.getLogger("imageio")
20
+
21
+ IS_PYPY = "__pypy__" in sys.builtin_module_names
22
+ THIS_DIR = os.path.abspath(os.path.dirname(__file__))
23
+
24
+
25
def urlopen(*args, **kwargs):
    """Compatibility function for the urlopen function. Raises an
    RuntimeError if urlopen could not be imported (which can occur in
    frozen applications.
    """
    try:
        from urllib.request import urlopen as _urlopen
    except ImportError:
        raise RuntimeError("Could not import urlopen.")
    return _urlopen(*args, **kwargs)
35
+
36
+
37
def _precision_warn(p1, p2, extra=""):
    """Log the standard lossy-conversion warning for dtype p1 -> p2."""
    logger.warning(
        "Lossy conversion from {} to {}. {} Convert image to {} prior to "
        "saving to suppress this warning.".format(p1, p2, extra, p2)
    )
43
+
44
+
45
def image_as_uint(im, bitdepth=None):
    """Convert the given image to uint (default: uint8)

    If the dtype already matches the desired format, it is returned
    as-is. If the image is float, and all values are between 0 and 1,
    the values are multiplied by np.power(2.0, bitdepth). In all other
    situations, the values are scaled such that the minimum value
    becomes 0 and the maximum value becomes np.power(2.0, bitdepth)-1
    (255 for 8-bit and 65535 for 16-bit).

    Raises ValueError for non-array input, unsupported bitdepths, and
    non-finite min/max values.
    """
    if not bitdepth:
        bitdepth = 8
    if not isinstance(im, np.ndarray):
        raise ValueError("Image must be a numpy array")
    if bitdepth == 8:
        out_type = np.uint8
    elif bitdepth == 16:
        out_type = np.uint16
    else:
        raise ValueError("Bitdepth must be either 8 or 16")
    dtype_str1 = str(im.dtype)
    dtype_str2 = out_type.__name__
    if (im.dtype == np.uint8 and bitdepth == 8) or (
        im.dtype == np.uint16 and bitdepth == 16
    ):
        # Already the correct format? Return as-is
        return im
    if dtype_str1.startswith("float") and np.nanmin(im) >= 0 and np.nanmax(im) <= 1:
        _precision_warn(dtype_str1, dtype_str2, "Range [0, 1].")
        # The just-below-0.5 offset makes the truncating astype() below act
        # as round-to-nearest while keeping 1.0 from overflowing the max.
        im = im.astype(np.float64) * (np.power(2.0, bitdepth) - 1) + 0.499999999
    elif im.dtype == np.uint16 and bitdepth == 8:
        _precision_warn(dtype_str1, dtype_str2, "Losing 8 bits of resolution.")
        # Keep the most significant byte.
        im = np.right_shift(im, 8)
    elif im.dtype == np.uint32:
        _precision_warn(
            dtype_str1,
            dtype_str2,
            "Losing {} bits of resolution.".format(32 - bitdepth),
        )
        im = np.right_shift(im, 32 - bitdepth)
    elif im.dtype == np.uint64:
        _precision_warn(
            dtype_str1,
            dtype_str2,
            "Losing {} bits of resolution.".format(64 - bitdepth),
        )
        im = np.right_shift(im, 64 - bitdepth)
    else:
        # Generic path: rescale [min, max] onto the full output range.
        mi = np.nanmin(im)
        ma = np.nanmax(im)
        if not np.isfinite(mi):
            raise ValueError("Minimum image value is not finite")
        if not np.isfinite(ma):
            raise ValueError("Maximum image value is not finite")
        if ma == mi:
            # Flat image: nothing to scale; just cast.
            return im.astype(out_type)
        _precision_warn(dtype_str1, dtype_str2, "Range [{}, {}].".format(mi, ma))
        # Now make float copy before we scale
        im = im.astype("float64")
        # Scale the values between 0 and 1 then multiply by the max value
        im = (im - mi) / (ma - mi) * (np.power(2.0, bitdepth) - 1) + 0.499999999
    assert np.nanmin(im) >= 0
    assert np.nanmax(im) < np.power(2.0, bitdepth)
    return im.astype(out_type)
109
+
110
+
111
class Array(np.ndarray):
    """Array(array, meta=None)

    A subclass of np.ndarray that has a meta attribute. Get the dictionary
    that contains the meta data using ``im.meta``. Convert to a plain numpy
    array using ``np.asarray(im)``.

    """

    def __new__(cls, array, meta=None):
        # Check
        if not isinstance(array, np.ndarray):
            raise ValueError("Array expects a numpy array.")
        if not (meta is None or isinstance(meta, dict)):
            raise ValueError("Array expects meta data to be a dict.")
        # Convert and return
        # If no meta is given, inherit whatever the input array carries.
        meta = meta if meta is not None else getattr(array, "meta", {})
        try:
            ob = array.view(cls)
        except AttributeError:  # pragma: no cover
            # Just return the original; no metadata on the array in Pypy!
            return array
        ob._copy_meta(meta)
        return ob

    def _copy_meta(self, meta):
        """Make a 2-level deep copy of the meta dictionary."""
        self._meta = Dict()
        for key, val in meta.items():
            if isinstance(val, dict):
                val = Dict(val)  # Copy this level
            self._meta[key] = val

    @property
    def meta(self):
        """The dict with the meta data of this image."""
        return self._meta

    def __array_finalize__(self, ob):
        """So the meta info is maintained when doing calculations with
        the array.
        """
        # Views/slices of an Array inherit its meta; anything else gets {}.
        if isinstance(ob, Array):
            self._copy_meta(ob.meta)
        else:
            self._copy_meta({})

    def __array_wrap__(self, out, context=None):
        """So that we return a native numpy array (or scalar) when a
        reducting ufunc is applied (such as sum(), std(), etc.)
        """
        if not out.shape:
            return out.dtype.type(out)  # Scalar
        elif out.shape != self.shape:
            return out.view(type=np.ndarray)
        else:
            return out  # Type Array
168
+
169
+
170
Image = Array  # Alias for backwards compatibility with older imageio code.
171
+
172
+
173
def asarray(a):
    """Pypy-safe version of np.asarray. Pypy's np.asarray consumes a
    *lot* of memory if the given array is an ndarray subclass. This
    function does not.
    """
    if not isinstance(a, np.ndarray):
        return np.asarray(a)
    if IS_PYPY:  # pragma: no cover
        a = a.copy()  # pypy has issues with base views
    return a.view(type=np.ndarray)
184
+
185
+
186
class Dict(OrderedDict):
    """A dict in which the keys can be get and set as if they were
    attributes. Very convenient in combination with autocompletion.

    This Dict still behaves as much as possible as a normal dict, and
    keys can be anything that are otherwise valid keys. However,
    keys that are not valid identifiers or that are names of the dict
    class (such as 'items' and 'copy') cannot be get/set as attributes.
    """

    # Names that must keep their normal attribute semantics.
    __reserved_names__ = dir(OrderedDict())  # Also from OrderedDict
    __pure_names__ = dir(dict())

    def __getattribute__(self, key):
        try:
            # Real attributes (methods, reserved names) win over dict keys.
            return object.__getattribute__(self, key)
        except AttributeError:
            if key in self:
                return self[key]
            else:
                raise

    def __setattr__(self, key, val):
        if key in Dict.__reserved_names__:
            # Either let OrderedDict do its work, or disallow
            if key not in Dict.__pure_names__:
                return OrderedDict.__setattr__(self, key, val)
            else:
                raise AttributeError(
                    "Reserved name, this key can only "
                    + "be set via ``d[%r] = X``" % key
                )
        else:
            # if isinstance(val, dict): val = Dict(val) -> no, makes a copy!
            self[key] = val

    def __dir__(self):
        # Expose identifier-like keys for autocompletion.
        def isidentifier(x):
            return bool(re.match(r"[a-z_]\w*$", x, re.I))

        names = [k for k in self.keys() if (isinstance(k, str) and isidentifier(k))]
        return Dict.__reserved_names__ + names
228
+
229
+
230
class BaseProgressIndicator(object):
    """BaseProgressIndicator(name)

    A progress indicator helps display the progress of a task to the
    user. Progress can be pending, running, finished or failed.

    Each task has:
      * a name - a short description of what needs to be done.
      * an action - the current action in performing the task (e.g. a subtask)
      * progress - how far the task is completed
      * max - max number of progress units. If 0, the progress is indefinite
      * unit - the units in which the progress is counted
      * status - 0: pending, 1: in progress, 2: finished, 3: failed

    This class defines an abstract interface. Subclasses should implement
    _start, _stop, _update_progress(progressText), _write(message).
    """

    def __init__(self, name):
        self._name = name
        self._action = ""
        self._unit = ""
        self._max = 0
        self._status = 0
        # Fix: initialize _progress here so that set_progress(),
        # increase_progress(), finish() and fail() do not raise
        # AttributeError when called before start().
        self._progress = 0
        self._last_progress_update = 0

    def start(self, action="", unit="", max=0):
        """start(action='', unit='', max=0)

        Start the progress. Optionally specify an action, a unit,
        and a maximum progress value.
        """
        if self._status == 1:
            # Implicitly close out a still-running task first.
            self.finish()
        self._action = action
        self._unit = unit
        self._max = max
        #
        self._progress = 0
        self._status = 1
        self._start()

    def status(self):
        """status()

        Get the status of the progress - 0: pending, 1: in progress,
        2: finished, 3: failed
        """
        return self._status

    def set_progress(self, progress=0, force=False):
        """set_progress(progress=0, force=False)

        Set the current progress. To avoid unnecessary progress updates
        this will only have a visual effect if the time since the last
        update is > 0.1 seconds, or if force is True.
        """
        self._progress = progress
        # Throttle visual updates to at most ~10/s unless forced.
        if not (force or (time.time() - self._last_progress_update > 0.1)):
            return
        self._last_progress_update = time.time()
        # Compose new string
        unit = self._unit or ""
        progressText = ""
        if unit == "%":
            progressText = "%2.1f%%" % progress
        elif self._max > 0:
            percent = 100 * float(progress) / self._max
            progressText = "%i/%i %s (%2.1f%%)" % (progress, self._max, unit, percent)
        elif progress > 0:
            if isinstance(progress, float):
                progressText = "%0.4g %s" % (progress, unit)
            else:
                progressText = "%i %s" % (progress, unit)
        # Update
        self._update_progress(progressText)

    def increase_progress(self, extra_progress):
        """increase_progress(extra_progress)

        Increase the progress by a certain amount.
        """
        self.set_progress(self._progress + extra_progress)

    def finish(self, message=None):
        """finish(message=None)

        Finish the progress, optionally specifying a message. This will
        not set the progress to the maximum.
        """
        self.set_progress(self._progress, True)  # force update
        self._status = 2
        self._stop()
        if message is not None:
            self._write(message)

    def fail(self, message=None):
        """fail(message=None)

        Stop the progress with a failure, optionally specifying a message.
        """
        self.set_progress(self._progress, True)  # force update
        self._status = 3
        self._stop()
        message = "FAIL " + (message or "")
        self._write(message)

    def write(self, message):
        """write(message)

        Write a message during progress (such as a warning).
        """
        if self.__class__ == BaseProgressIndicator:
            # When this class is used as a dummy, print explicit message
            print(message)
        else:
            return self._write(message)

    # Implementing classes should implement these

    def _start(self):
        pass

    def _stop(self):
        pass

    def _update_progress(self, progressText):
        pass

    def _write(self, message):
        pass
362
+
363
+
364
class StdoutProgressIndicator(BaseProgressIndicator):
    """StdoutProgressIndicator(name)

    A progress indicator that renders the progress on stdout.  It
    assumes the tty deals appropriately with backspace characters.
    """

    def _start(self):
        self._chars_prefix, self._chars = "", ""
        # Compose and emit the leading label.
        if self._action:
            self._chars_prefix = "%s (%s): " % (self._name, self._action)
        else:
            self._chars_prefix = "%s: " % self._name
        sys.stdout.write(self._chars_prefix)
        sys.stdout.flush()

    def _update_progress(self, progressText):
        # Indefinite progress: advance a little spinner instead.
        if not progressText:
            frames = "-\\|/"
            try:
                pos = frames.index(self._chars)
            except ValueError:
                pos = -1  # unknown previous char -> start at "-"
            progressText = frames[(pos + 1) % len(frames)]
        # Backspace over the old text, then emit and remember the new.
        erase = "\b" * len(self._chars)
        self._chars = progressText
        sys.stdout.write(erase + self._chars)
        sys.stdout.flush()

    def _stop(self):
        self._chars = self._chars_prefix = ""
        sys.stdout.write("\n")
        sys.stdout.flush()

    def _write(self, message):
        # Backspace over the whole progress line, print the message...
        erase = "\b" * len(self._chars_prefix + self._chars)
        sys.stdout.write(erase + " " + message + "\n")
        # ...then re-print the progress line below it.
        sys.stdout.write(self._chars_prefix + self._chars)
        sys.stdout.flush()
406
+
407
+
408
+ # From pyzolib/paths.py (https://bitbucket.org/pyzo/pyzolib/src/tip/paths.py)
409
def appdata_dir(appname=None, roaming=False):
    """appdata_dir(appname=None, roaming=False)

    Get the path to the application directory, where applications are
    allowed to write user specific files (e.g. configurations). For
    non-user specific data, consider using common_appdata_dir().
    If appname is given, a subdir is appended (and created if necessary).
    If roaming is True, will prefer a roaming directory (Windows Vista/7).
    """
    # User home dir, overridable via IMAGEIO_USERDIR; fall back to
    # /var/tmp when the home dir does not exist (issue #54).
    userDir = os.getenv("IMAGEIO_USERDIR", None)
    if userDir is None:
        userDir = os.path.expanduser("~")
        if not os.path.isdir(userDir):  # pragma: no cover
            userDir = "/var/tmp"  # issue #54

    # Platform-specific application-data directory.
    path = None
    if sys.platform.startswith("win"):
        localpath, roamingpath = os.getenv("LOCALAPPDATA"), os.getenv("APPDATA")
        path = (roamingpath or localpath) if roaming else (localpath or roamingpath)
    elif sys.platform.startswith("darwin"):
        path = os.path.join(userDir, "Library", "Application Support")
    # On Linux and as fallback
    if not (path and os.path.isdir(path)):
        path = userDir

    # Prefer a writable "settings" dir next to the executable, if any
    # (portable distros or frozen apps that want to be portable).
    prefix = sys.prefix
    if getattr(sys, "frozen", None):
        prefix = os.path.abspath(os.path.dirname(sys.executable))
    for reldir in ("settings", "../settings"):
        localpath = os.path.abspath(os.path.join(prefix, reldir))
        if os.path.isdir(localpath):  # pragma: no cover
            probe = os.path.join(localpath, "test.write")
            try:
                open(probe, "wb").close()
                os.remove(probe)
            except IOError:
                pass  # We cannot write in this directory
            else:
                path = localpath
                break

    # Append (and create) the app-specific subdir.
    if appname:
        if path == userDir:
            appname = "." + appname.lstrip(".")  # Make it a hidden directory
        path = os.path.join(path, appname)
        if not os.path.isdir(path):  # pragma: no cover
            os.makedirs(path, exist_ok=True)

    return path
464
+
465
+
466
def resource_dirs():
    """resource_dirs()

    Get a list of directories where imageio resources may be located.
    The first directory in this list is the "resources" directory in
    the package itself. The second directory is the appdata directory
    (~/.imageio on Linux). The list further contains the application
    directory (for frozen apps), and may include additional directories
    in the future.
    """
    # Resource dir baked in the package comes first.
    dirs = [resource_package_dir()]
    # Appdata directory (the home dir may not be writable).
    try:
        dirs.append(appdata_dir("imageio"))
    except Exception:  # pragma: no cover
        pass
    # Directory where the app is located (mainly for frozen apps).
    if getattr(sys, "frozen", None):
        dirs.append(os.path.abspath(os.path.dirname(sys.executable)))
    elif sys.path and sys.path[0]:
        dirs.append(os.path.abspath(sys.path[0]))
    return dirs
489
+
490
+
491
def resource_package_dir():
    """package_dir

    Get the resources directory in the imageio package installation
    directory.

    Notes
    -----
    This is a convenience method that is used by `resource_dirs` and
    imageio entry point scripts.
    """
    # pkg_resources is optional (setuptools may be absent) and imported
    # lazily because importing it is slow:
    # https://github.com/pypa/setuptools/issues/510
    try:
        import pkg_resources
    except ImportError:
        pkg_resources = None

    if pkg_resources is None:
        # Fallback without setuptools.
        return os.path.abspath(os.path.join(THIS_DIR, "..", "resources"))
    # resource_filename also works with eggs.
    return pkg_resources.resource_filename("imageio", "resources")
518
+
519
+
520
def get_platform():
    """get_platform()

    Get a string that specifies the platform more specific than
    sys.platform does. The result can be: linux32, linux64, win32,
    win64, osx32, osx64. Other platforms may be added in the future.
    """
    # Map sys.platform prefixes to name templates.
    templates = {
        "linux": "linux%i",
        "win": "win%i",
        "darwin": "osx%i",
        "freebsd": "freebsd%i",
    }
    for prefix, template in templates.items():
        if sys.platform.startswith(prefix):
            # Pointer size distinguishes 32 from 64 bits.
            return template % (struct.calcsize("P") * 8)
    return None  # pragma: no cover
540
+
541
+
542
def has_module(module_name):
    """Check to see if a python module is available.

    Walks the dotted *module_name* level by level with
    ``importlib.util.find_spec`` and returns False as soon as any
    level cannot be resolved; returns True otherwise.
    """
    # Fixes vs the previous version:
    #  * import importlib.util explicitly -- ``import importlib`` alone
    #    does not guarantee the ``util`` submodule is loaded, which
    #    could raise AttributeError at call time.
    #  * drop the dead Python-2 ``imp`` fallback (this tree targets
    #    Python 3, where the version check was always true).
    #  * find_spec raises ModuleNotFoundError/ValueError when a parent
    #    in the dotted path is a plain module (not a package); treat
    #    that as "not available" instead of propagating.
    import importlib.util

    name_parts = module_name.split(".")
    for i in range(len(name_parts)):
        try:
            spec = importlib.util.find_spec(".".join(name_parts[: i + 1]))
        except (ImportError, ValueError):
            return False
        if spec is None:
            return False
    return True
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/bsdf.cpython-38.pyc ADDED
Binary file (11.1 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/dicom.cpython-38.pyc ADDED
Binary file (7.75 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/example.cpython-38.pyc ADDED
Binary file (3.52 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/ffmpeg.cpython-38.pyc ADDED
Binary file (18.3 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/freeimagemulti.cpython-38.pyc ADDED
Binary file (10.7 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/grab.cpython-38.pyc ADDED
Binary file (3.74 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/pyav.cpython-38.pyc ADDED
Binary file (29.7 kB). View file
 
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/imageio/plugins/__pycache__/spe.cpython-38.pyc ADDED
Binary file (20.5 kB). View file