diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/common/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ccb65767ac1ea637378d5031ff7e6d51b6abff9d Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/common/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/backports/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/common/backports/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..23c08786d0c885e158bffe7d881d5cd5a9f0bc68 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/common/backports/__init__.py @@ -0,0 +1,79 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import six + +import weakref + + +class WeakMethod(weakref.ref): # pragma: NO COVER + """ + A custom `weakref.ref` subclass which simulates a weak reference to + a bound method, working around the lifetime problem of bound methods. + + This is a copy of the WeakMethod class that ships with weakref in the + python 3.7 standard library, adapted to work in 2.6. 
See: + https://github.com/python/cpython/blob/a31f4cc881992e84d351957bd9ac1a92f882fa39/Lib/weakref.py#L36-L87 + """ # noqa + + __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" + + def __new__(cls, meth, callback=None): + try: + obj = meth.__self__ + func = meth.__func__ + except AttributeError: + error = TypeError("argument should be a bound method, not {}" + .format(type(meth))) + six.raise_from(error, None) + + def _cb(arg): + # The self-weakref trick is needed to avoid creating a reference + # cycle. + self = self_wr() + if self._alive: + self._alive = False + if callback is not None: + callback(self) + self = weakref.ref.__new__(cls, obj, _cb) + self._func_ref = weakref.ref(func, _cb) + self._meth_type = type(meth) + self._alive = True + self_wr = weakref.ref(self) + return self + + def __call__(self): + obj = super(WeakMethod, self).__call__() + func = self._func_ref() + if obj is None or func is None: + return None + return self._meth_type(func, obj) + + def __eq__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is other + return (weakref.ref.__eq__(self, other) + and self._func_ref == other._func_ref) + return False + + def __ne__(self, other): + if isinstance(other, WeakMethod): + if not self._alive or not other._alive: + return self is not other + return (weakref.ref.__ne__(self, other) + or self._func_ref != other._func_ref) + return True + + __hash__ = weakref.ref.__hash__ diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/backports/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/common/backports/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be2ca7d05a8f30bbb240c10ba82f17c43d742ffa Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/common/backports/__pycache__/__init__.cpython-311.pyc differ diff --git 
a/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cbd8cf288d5271f59a2a451a82f3c0110a47b581 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__init__.py @@ -0,0 +1,46 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import importlib + +__all__ = ['Namespace', 'load'] + + +class Namespace(object): + def __init__(self, name, parent=None): + self.parent = parent + self.name = name + + def __getattr__(self, name): + return type(self)(name, self) + + def __str__(self): + if self.parent is None: + return self.name + return '{!s}.{}'.format(self.parent, self.name) + + def __call__(self, *args, **kwargs): + ctor = getattr(importlib.import_module(str(self.parent)), self.name) + return ctor(*args, **kwargs) + + @classmethod + def eval(cls, expr): + return eval(expr, {}, {'opencensus': cls('opencensus')}) + + +def load(expr): + """Dynamically import OpenCensus components and evaluate the provided + configuration expression. 
+ """ + return Namespace.eval(expr) diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..613cea2178ef8733f16da92b800976f17322bd19 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/common/configuration/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/gcp_metadata_config.py b/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/gcp_metadata_config.py new file mode 100644 index 0000000000000000000000000000000000000000..0cdfde6744998c4f9a118d03047ab22c8f994273 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/gcp_metadata_config.py @@ -0,0 +1,115 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from opencensus.common.http_handler import get_request + +_GCP_METADATA_URI = 'http://metadata.google.internal/computeMetadata/v1/' +_GCP_METADATA_URI_HEADER = {'Metadata-Flavor': 'Google'} + +# ID of the GCP project associated with this resource, such as "my-project" +PROJECT_ID_KEY = 'project_id' + +# Numeric VM instance identifier assigned by GCE +INSTANCE_ID_KEY = 'instance_id' + +# The GCE zone in which the VM is running +ZONE_KEY = 'zone' + +# GKE cluster name +CLUSTER_NAME_KEY = 'instance/attributes/cluster-name' + +# GCE common attributes +# See: https://cloud.google.com/appengine/docs/flexible/python/runtime#environment_variables # noqa +_GCE_ATTRIBUTES = { + PROJECT_ID_KEY: 'project/project-id', + INSTANCE_ID_KEY: 'instance/id', + ZONE_KEY: 'instance/zone' +} + +_ATTRIBUTE_URI_TRANSFORMATIONS = { + _GCE_ATTRIBUTES[ZONE_KEY]: + lambda v: v[v.rfind('/') + 1:] if '/' in v else v +} + +_GCP_METADATA_MAP = {} + + +class GcpMetadataConfig(object): + """GcpMetadata represents metadata retrieved from GCP (GKE and GCE) + environment. Some attributes are retrieved from the system environment. + see : https://cloud.google.com/compute/docs/storing + -retrieving-metadata + """ + inited = False + is_running = False + + @classmethod + def _initialize_metadata_service(cls): + """Initialize metadata service once and load gcp metadata into map + This method should only be called once. 
+ """ + if cls.inited: + return + + instance_id = cls.get_attribute('instance/id') + + if instance_id is not None: + cls.is_running = True + + _GCP_METADATA_MAP['instance_id'] = instance_id + + # fetch attributes from metadata request + for attribute_key, attribute_uri in _GCE_ATTRIBUTES.items(): + if attribute_key not in _GCP_METADATA_MAP: + attribute_value = cls.get_attribute(attribute_uri) + if attribute_value is not None: # pragma: NO COVER + _GCP_METADATA_MAP[attribute_key] = attribute_value + + cls.inited = True + + @classmethod + def is_running_on_gcp(cls): + cls._initialize_metadata_service() + return cls.is_running + + def get_gce_metadata(self): + """for GCP GCE instance""" + if self.is_running_on_gcp(): + return _GCP_METADATA_MAP + + return dict() + + @staticmethod + def get_attribute(attribute_uri): + """ + Fetch the requested instance metadata entry. + :param attribute_uri: attribute_uri: attribute name relative to the + computeMetadata/v1 prefix + :return: The value read from the metadata service or None + """ + attribute_value = get_request(_GCP_METADATA_URI + attribute_uri, + _GCP_METADATA_URI_HEADER) + + if attribute_value is not None and isinstance(attribute_value, bytes): + # At least in python3, bytes are are returned from + # urllib (although the response is text), convert + # to a normal string: + attribute_value = attribute_value.decode('utf-8') + + transformation = _ATTRIBUTE_URI_TRANSFORMATIONS.get(attribute_uri) + if transformation is not None: + attribute_value = transformation(attribute_value) + + return attribute_value diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/k8s_utils.py b/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/k8s_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..82265f505125cc76fbd5a6cf5eb325b05c5bebcb --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/common/monitored_resource/k8s_utils.py @@ -0,0 +1,64 @@ +# 
Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from opencensus.common.monitored_resource import gcp_metadata_config + +# Env var that signals that we're in a kubernetes container +_KUBERNETES_SERVICE_HOST = 'KUBERNETES_SERVICE_HOST' + +# Name of the cluster the container is running in +CLUSTER_NAME_KEY = 'k8s.io/cluster/name' + +# ID of the instance the container is running on +NAMESPACE_NAME_KEY = 'k8s.io/namespace/name' + +# Container pod ID +POD_NAME_KEY = 'k8s.io/pod/name' + +# Container name +CONTAINER_NAME_KEY = 'k8s.io/container/name' + +# Attributes set from environment variables +_K8S_ENV_ATTRIBUTES = { + CONTAINER_NAME_KEY: 'CONTAINER_NAME', + NAMESPACE_NAME_KEY: 'NAMESPACE', + POD_NAME_KEY: 'HOSTNAME' +} + + +def is_k8s_environment(): + """Whether the environment is a kubernetes container. + + The KUBERNETES_SERVICE_HOST environment variable must be set. 
+ """ + return _KUBERNETES_SERVICE_HOST in os.environ + + +def get_k8s_metadata(): + """Get kubernetes container metadata, as on GCP GKE.""" + k8s_metadata = {} + + gcp_cluster = (gcp_metadata_config.GcpMetadataConfig + .get_attribute(gcp_metadata_config.CLUSTER_NAME_KEY)) + if gcp_cluster is not None: + k8s_metadata[CLUSTER_NAME_KEY] = gcp_cluster + + for attribute_key, attribute_env in _K8S_ENV_ATTRIBUTES.items(): + attribute_value = os.environ.get(attribute_env) + if attribute_value is not None: + k8s_metadata[attribute_key] = attribute_value + + return k8s_metadata diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/version/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/common/version/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2927eec884d46d0a26b3fa97b613a787710b1c20 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/common/version/__init__.py @@ -0,0 +1,15 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +__version__ = '0.11.4' diff --git a/.venv/lib/python3.11/site-packages/opencensus/common/version/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/common/version/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..70296e49039ce93f0b05f127a1ba10f4a17457a8 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/common/version/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__init__.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44601c82f640b82e1f2f896319093185ccad7ef7 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/cumulative.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/cumulative.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c6c8c408b8d2be13561d3e1385faa20bd7a31bc0 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/cumulative.cpython-311.pyc differ diff --git 
a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/gauge.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/gauge.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..42627763c065e7f1fd73a67bb553bcf318356186 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/gauge.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/metric.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/metric.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c889c03de26b0b21d92feedb9ec9f33c78df99ad Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/metric.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/point.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/point.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45230e03519d530c1d9033445d74b0e72a1d0cb9 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/point.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/summary.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/summary.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9f62746232dc8983f31be3c852d5fff8ad2d042e Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/summary.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/time_series.cpython-311.pyc b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/time_series.cpython-311.pyc 
new file mode 100644 index 0000000000000000000000000000000000000000..03c32e68b9c0d3d7bc11ffe69276476dfe95d225 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/__pycache__/time_series.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/cumulative.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/cumulative.py new file mode 100644 index 0000000000000000000000000000000000000000..10eea4472bd5a9903af1fc53c8b0fdccdf3d595e --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/cumulative.py @@ -0,0 +1,87 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import six + +from opencensus.metrics.export import gauge, metric_descriptor + + +class CumulativePointLong(gauge.GaugePointLong): + """A `GaugePointLong` that cannot decrease.""" + + def _set(self, val): + if not isinstance(val, six.integer_types): + raise ValueError("CumulativePointLong only supports integer types") + if val > self.get_value(): + super(CumulativePointLong, self)._set(val) + + def add(self, val): + """Add `val` to the current value if it's positive. + + Return without adding if `val` is not positive. + + :type val: int + :param val: Value to add. 
+ """ + if not isinstance(val, six.integer_types): + raise ValueError("CumulativePointLong only supports integer types") + if val > 0: + super(CumulativePointLong, self).add(val) + + +class CumulativePointDouble(gauge.GaugePointDouble): + """A `GaugePointDouble` that cannot decrease.""" + + def _set(self, val): + if val > self.get_value(): + super(CumulativePointDouble, self)._set(val) + + def add(self, val): + """Add `val` to the current value if it's positive. + + Return without adding if `val` is not positive. + + :type val: float + :param val: Value to add. + """ + if val > 0: + super(CumulativePointDouble, self).add(val) + + +class LongCumulativeMixin(object): + """Type mixin for long-valued cumulative measures.""" + descriptor_type = metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64 + point_type = CumulativePointLong + + +class DoubleCumulativeMixin(object): + """Type mixin for float-valued cumulative measures.""" + descriptor_type = metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE + point_type = CumulativePointDouble + + +class LongCumulative(LongCumulativeMixin, gauge.Gauge): + """Records cumulative int-valued measurements.""" + + +class DoubleCumulative(DoubleCumulativeMixin, gauge.Gauge): + """Records cumulative float-valued measurements.""" + + +class DerivedLongCumulative(LongCumulativeMixin, gauge.DerivedGauge): + """Records derived cumulative int-valued measurements.""" + + +class DerivedDoubleCumulative(DoubleCumulativeMixin, gauge.DerivedGauge): + """Records derived cumulative float-valued measurements.""" diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/gauge.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/gauge.py new file mode 100644 index 0000000000000000000000000000000000000000..149f38328431ad9f3cdaf4469811913a95373891 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/gauge.py @@ -0,0 +1,513 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import six + +import threading +from collections import OrderedDict +from datetime import datetime + +from opencensus.common import utils +from opencensus.metrics.export import ( + metric, + metric_descriptor, + metric_producer, +) +from opencensus.metrics.export import point as point_module +from opencensus.metrics.export import time_series +from opencensus.metrics.export import value as value_module + + +def get_timeseries_list(points, timestamp): + """Convert a list of `GaugePoint`s into a list of `TimeSeries`. + + Get a :class:`opencensus.metrics.export.time_series.TimeSeries` for each + measurement in `points`. Each series contains a single + :class:`opencensus.metrics.export.point.Point` that represents the last + recorded value of the measurement. + + :type points: list(:class:`GaugePoint`) + :param points: The list of measurements to convert. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: Recording time to report, usually the current time. + + :rtype: list(:class:`opencensus.metrics.export.time_series.TimeSeries`) + :return: A list of one `TimeSeries` for each point in `points`. 
+ """ + ts_list = [] + for lv, gp in points.items(): + point = point_module.Point(gp.to_point_value(), timestamp) + ts_list.append(time_series.TimeSeries(lv, [point], timestamp)) + return ts_list + + +class GaugePoint(object): + + def to_point_value(self): + raise NotImplementedError # pragma: NO COVER + + def get_value(self): + raise NotImplementedError # pragma: NO COVER + + +class GaugePointLong(GaugePoint): + """An instantaneous measurement from a LongGauge. + + A GaugePointLong represents the most recent measurement from a + :class:`LongGauge` for a given set of label values. + """ + + def __init__(self): + self.value = 0 + self._value_lock = threading.Lock() + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value + )) + + def add(self, val): + """Add `val` to the current value. + + :type val: int + :param val: Value to add. + """ + if not isinstance(val, six.integer_types): + raise ValueError("GaugePointLong only supports integer types") + with self._value_lock: + self.value += val + + def _set(self, val): + if not isinstance(val, six.integer_types): + raise ValueError("GaugePointLong only supports integer types") + with self._value_lock: + self.value = val + + def set(self, val): + """Set the current value to `val`. + + :type val: int + :param val: Value to set. + """ + self._set(val) + + def get_value(self): + """Get the current value. + + :rtype: int + :return: The current value of the measurement. + """ + return self.value + + def to_point_value(self): + """Get a point value conversion of the current value. + + :rtype: :class:`opencensus.metrics.export.value.ValueLong` + :return: A converted `ValueLong`. + """ + return value_module.ValueLong(self.value) + + +class GaugePointDouble(GaugePoint): + """An instantaneous measurement from a DoubleGauge. + + A `GaugePointDouble` represents the most recent measurement from a + :class:`DoubleGauge` for a given set of label values. 
+ """ + + def __init__(self): + self.value = 0.0 + self._value_lock = threading.Lock() + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value + )) + + def add(self, val): + """Add `val` to the current value. + + :type val: float + :param val: Value to add. + """ + with self._value_lock: + self.value += val + + def _set(self, val): + with self._value_lock: + self.value = float(val) + + def set(self, val): + """Set the current value to `val`. + + :type val: float + :param val: Value to set. + """ + self._set(val) + + def get_value(self): + """Get the current value. + + :rtype: float + :return: The current value of the measurement. + """ + return self.value + + def to_point_value(self): + """Get a point value conversion of the current value. + + :rtype: :class:`opencensus.metrics.export.value.ValueDouble` + :return: A converted `ValueDouble`. + """ + return value_module.ValueDouble(self.value) + + +class DerivedGaugePoint(GaugePoint): + """Wraps a `GaugePoint` to automatically track the value of a function. + + A `DerivedGaugePoint` is a read-only measure that stores the most recently + read value of a given function in a mutable `GaugePoint`. Calling + `get_value` or `to_point_value` calls the tracked function and updates the + wrapped `GaugePoint`. + + :type func: function + :param func: The function to track. + + :type gauge_point: :class:`GaugePointLong`, :class:`GaugePointDouble`, + :class:`opencensus.metrics.export.cumulative.CumulativePointLong`, or + :class:`opencensus.metrics.export.cumulative.CumulativePointDouble` + :param gauge_point: The underlying `GaugePoint`. + """ + def __init__(self, func, gauge_point, **kwargs): + self.gauge_point = gauge_point + self.func = utils.get_weakref(func) + self._kwargs = kwargs + + def __repr__(self): + return ("{}({})({})" + .format( + type(self).__name__, + self.func(), + self._kwargs + )) + + def get_value(self): + """Get the current value of the underlying measurement. 
+ + Calls the tracked function and stores the value in the wrapped + measurement as a side-effect. + + :rtype: int, float, or None + :return: The current value of the wrapped function, or `None` if it no + longer exists. + """ + try: + val = self.func()(**self._kwargs) + except TypeError: # The underlying function has been GC'd + return None + + self.gauge_point._set(val) + return self.gauge_point.get_value() + + def to_point_value(self): + """Get a point value conversion of the current value. + + Calls the tracked function and stores the value in the wrapped + measurement as a side-effect. + + :rtype: :class:`opencensus.metrics.export.value.ValueLong`, + :class:`opencensus.metrics.export.value.ValueDouble`, or None + :return: The point value conversion of the underlying `GaugePoint`, or + None if the tracked function no longer exists. + """ + if self.get_value() is None: + return None + return self.gauge_point.to_point_value() + + +class BaseGauge(object): + """Base class for sets instantaneous measurements.""" + + def __init__(self, name, description, unit, label_keys): + self._len_label_keys = len(label_keys) + self.default_label_values = [None] * self._len_label_keys + self.descriptor = metric_descriptor.MetricDescriptor( + name, description, unit, self.descriptor_type, label_keys) + self.points = OrderedDict() + self._points_lock = threading.Lock() + + def __repr__(self): + return ('{}(descriptor.name="{}", points={})' + .format( + type(self).__name__, + self.descriptor.name, + self.points + )) + + def _remove_time_series(self, label_values): + with self._points_lock: + try: + del self.points[tuple(label_values)] + except KeyError: + pass + + def remove_time_series(self, label_values): + """Remove the time series for specific label values. + + :type label_values: list(:class:`LabelValue`) + :param label_values: Label values of the time series to remove. 
+ """ + if label_values is None: + raise ValueError + if any(lv is None for lv in label_values): + raise ValueError + if len(label_values) != self._len_label_keys: + raise ValueError + self._remove_time_series(label_values) + + def remove_default_time_series(self): + """Remove the default time series for this gauge.""" + self._remove_time_series(self.default_label_values) + + def clear(self): + """Remove all points from this gauge.""" + with self._points_lock: + self.points = OrderedDict() + + def get_metric(self, timestamp): + """Get a metric including all current time series. + + Get a :class:`opencensus.metrics.export.metric.Metric` with one + :class:`opencensus.metrics.export.time_series.TimeSeries` for each + set of label values with a recorded measurement. Each `TimeSeries` + has a single point that represents the last recorded value. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: Recording time to report, usually the current time. + + :rtype: :class:`opencensus.metrics.export.metric.Metric` or None + :return: A converted metric for all current measurements. + """ + if not self.points: + return None + + with self._points_lock: + ts_list = get_timeseries_list(self.points, timestamp) + return metric.Metric(self.descriptor, ts_list) + + @property + def descriptor_type(self): # pragma: NO COVER + raise NotImplementedError + + @property + def point_type(self): # pragma: NO COVER + raise NotImplementedError + + +class Gauge(BaseGauge): + """A set of mutable, instantaneous measurements of the same type. + + End users should use :class:`LongGauge`, :class:`DoubleGauge`, + :class:`opencensus.metrics.export.cumulative.LongCumulative`, or + :class:`opencensus.metrics.export.cumulative.DoubleCumulative` instead of + using this class directly. + + The constructor arguments are used to create a + :class:`opencensus.metrics.export.metric_descriptor.MetricDescriptor` for + converted metrics. See that class for details. 
+ """ + + def _get_or_create_time_series(self, label_values): + with self._points_lock: + return self.points.setdefault( + tuple(label_values), self.point_type()) + + def get_or_create_time_series(self, label_values): + """Get a mutable measurement for the given set of label values. + + :type label_values: list(:class:`LabelValue`) + :param label_values: The measurement's label values. + + :rtype: :class:`GaugePointLong`, :class:`GaugePointDouble` + :class:`opencensus.metrics.export.cumulative.CumulativePointLong`, + or + :class:`opencensus.metrics.export.cumulative.CumulativePointDouble` + :return: A mutable point that represents the last value of the + measurement. + """ + if label_values is None: + raise ValueError + if any(lv is None for lv in label_values): + raise ValueError + if len(label_values) != self._len_label_keys: + raise ValueError + return self._get_or_create_time_series(label_values) + + def get_or_create_default_time_series(self): + """Get the default measurement for this gauge. + + Each gauge has a default point not associated with any specific label + values. When this gauge is exported as a metric via `get_metric` the + time series associated with this point will have null label values. + + :rtype: :class:`GaugePointLong`, :class:`GaugePointDouble` + :class:`opencensus.metrics.export.cumulative.CumulativePointLong`, + or + :class:`opencensus.metrics.export.cumulative.CumulativePointDouble` + :return: A mutable point that represents the last value of the + measurement. 
+ """ + return self._get_or_create_time_series(self.default_label_values) + + +class LongGaugeMixin(object): + """Type mixin for long-valued gauges.""" + descriptor_type = metric_descriptor.MetricDescriptorType.GAUGE_INT64 + point_type = GaugePointLong + + +class DoubleGaugeMixin(object): + """Type mixin for float-valued gauges.""" + descriptor_type = metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE + point_type = GaugePointDouble + + +class LongGauge(LongGaugeMixin, Gauge): + """Gauge for recording int-valued measurements.""" + + +class DoubleGauge(DoubleGaugeMixin, Gauge): + """Gauge for recording float-valued measurements.""" + + +class DerivedGauge(BaseGauge): + """Gauge that tracks values of other functions. + + Each of a `DerivedGauge`'s measurements are associated with a function + which is called when the gauge is exported. + + End users should use :class:`DerivedLongGauge`, :class:`DerivedDoubleGauge` + :class:`opencensus.metrics.export.cumulative.DerivedLongCumulative`, or + :class:`opencensus.metrics.export.cumulative.DerivedDoubleCumulative` + instead of using this class directly. + """ + + def _create_time_series(self, label_values, func, **kwargs): + with self._points_lock: + return self.points.setdefault( + tuple(label_values), + DerivedGaugePoint(func, self.point_type(), **kwargs)) + + def create_time_series(self, label_values, func, **kwargs): + """Create a derived measurement to trac `func`. + + :type label_values: list(:class:`LabelValue`) + :param label_values: The measurement's label values. + + :type func: function + :param func: The function to track. + + :rtype: :class:`DerivedGaugePoint` + :return: A read-only measurement that tracks `func`. 
+ """ + if label_values is None: + raise ValueError + if any(lv is None for lv in label_values): + raise ValueError + if len(label_values) != self._len_label_keys: + raise ValueError + if func is None: + raise ValueError + return self._create_time_series(label_values, func, **kwargs) + + def create_default_time_series(self, func): + """Create the default derived measurement for this gauge. + + :type func: function + :param func: The function to track. + + :rtype: :class:`DerivedGaugePoint` + :return: A read-only measurement that tracks `func`. + """ + if func is None: + raise ValueError + return self._create_time_series(self.default_label_values, func) + + +class DerivedLongGauge(LongGaugeMixin, DerivedGauge): + """Gauge for derived int-valued measurements.""" + + +class DerivedDoubleGauge(DoubleGaugeMixin, DerivedGauge): + """Gauge for derived float-valued measurements.""" + + +class Registry(metric_producer.MetricProducer): + """A collection of gauges to be exported together. + + Each registered gauge must have a unique `descriptor.name`. + """ + + def __init__(self): + self.gauges = {} + self._gauges_lock = threading.Lock() + + def __repr__(self): + return ('{}(gauges={}' + .format( + type(self).__name__, + self.gauges + )) + + def add_gauge(self, gauge): + """Add `gauge` to the registry. + + Raises a `ValueError` if another gauge with the same name already + exists in the registry. + + :type gauge: class:`LongGauge`, class:`DoubleGauge`, + :class:`opencensus.metrics.export.cumulative.LongCumulative`, + :class:`opencensus.metrics.export.cumulative.DoubleCumulative`, + :class:`DerivedLongGauge`, :class:`DerivedDoubleGauge` + :class:`opencensus.metrics.export.cumulative.DerivedLongCumulative`, + or + :class:`opencensus.metrics.export.cumulative.DerivedDoubleCumulative` + :param gauge: The gauge to add to the registry. 
+ """ + if gauge is None: + raise ValueError + name = gauge.descriptor.name + with self._gauges_lock: + if name in self.gauges: + raise ValueError( + 'Another gauge named "{}" is already registered' + .format(name)) + self.gauges[name] = gauge + + def get_metrics(self): + """Get a metric for each gauge in the registry at the current time. + + :rtype: set(:class:`opencensus.metrics.export.metric.Metric`) + :return: A set of `Metric`s, one for each registered gauge. + """ + now = datetime.utcnow() + metrics = set() + for gauge in self.gauges.values(): + metrics.add(gauge.get_metric(now)) + return metrics diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric.py new file mode 100644 index 0000000000000000000000000000000000000000..658a27e45125376833965c07c6c3db599f5498f8 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric.py @@ -0,0 +1,79 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from opencensus.metrics.export import metric_descriptor + + +class Metric(object): + """A collection of time series data and label metadata. + + This class implements the spec for v1 Metrics as of opencensus-proto + release v0.1.0. 
See opencensus-proto for details: + + https://github.com/census-instrumentation/opencensus-proto/blob/v0.1.0/src/opencensus/proto/metrics/v1/metrics.proto#L35 + + Defines a Metric which has one or more timeseries. + + :type descriptor: class: '~opencensus.metrics.export.metric_descriptor.MetricDescriptor' + :param descriptor: The metric's descriptor. + + :type timeseries: list(:class: '~opencensus.metrics.export.time_series.TimeSeries') + :param timeseries: One or more timeseries for a single metric, where each + timeseries has one or more points. + """ # noqa + + def __init__(self, descriptor, time_series): + if not time_series: + raise ValueError("time_series must not be empty or null") + if descriptor is None: + raise ValueError("descriptor must not be null") + self._time_series = time_series + self._descriptor = descriptor + self._check_type() + + def __repr__(self): + return ('{}(time_series={}, descriptor.name="{}")' + .format( + type(self).__name__, + "<{} TimeSeries>".format(len(self.time_series)), + self.descriptor.name, + )) + + @property + def time_series(self): + return self._time_series + + @property + def descriptor(self): + return self._descriptor + + def _check_type(self): + """Check that point value types match the descriptor type.""" + check_type = metric_descriptor.MetricDescriptorType.to_type_class( + self.descriptor.type) + for ts in self.time_series: + if not ts.check_points_type(check_type): + raise ValueError("Invalid point value type") + + def _check_start_timestamp(self): + """Check that starting timestamp exists for cumulative metrics.""" + if self.descriptor.type in ( + metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64, + metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE, + metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION, + ): + for ts in self.time_series: + if ts.start_timestamp is None: + raise ValueError("time_series.start_timestamp must exist " + "for cumulative metrics") diff --git 
a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_descriptor.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_descriptor.py new file mode 100644 index 0000000000000000000000000000000000000000..955a5ff879d9ce401b98e55a725751ec81f3bba7 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_descriptor.py @@ -0,0 +1,174 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import six + +from opencensus.metrics.export import value + + +class _MetricDescriptorTypeMeta(type): + """Helper for `x in MetricDescriptorType`.""" + + def __contains__(cls, item): + return item in { + MetricDescriptorType.GAUGE_INT64, + MetricDescriptorType.GAUGE_DOUBLE, + MetricDescriptorType.GAUGE_DISTRIBUTION, + MetricDescriptorType.CUMULATIVE_INT64, + MetricDescriptorType.CUMULATIVE_DOUBLE, + MetricDescriptorType.CUMULATIVE_DISTRIBUTION + } + + +@six.add_metaclass(_MetricDescriptorTypeMeta) +class MetricDescriptorType(object): + """The kind of metric. It describes how the data is reported. + + MetricDescriptorType is an enum of valid MetricDescriptor type values. See + opencensus-proto for details: + + https://github.com/census-instrumentation/opencensus-proto/blob/v0.1.0/src/opencensus/proto/metrics/v1/metrics.proto#L79 + + A gauge is an instantaneous measurement of a value. + + A cumulative measurement is a value accumulated over a time interval. 
In a + time series, cumulative measurements should have the same start time and + increasing end times, until an event resets the cumulative value to zero + and sets a new start time for the following points. + + """ + # Integer gauge. The value can go both up and down. + GAUGE_INT64 = 1 + + # Floating point gauge. The value can go both up and down. + GAUGE_DOUBLE = 2 + + # Distribution gauge measurement. The count and sum can go both up and + # down. Recorded values are always >= 0. + # Used in scenarios like a snapshot of time the current items in a queue + # have spent there. + GAUGE_DISTRIBUTION = 3 + + # Integer cumulative measurement. The value cannot decrease, if resets then + # the start_time should also be reset. + CUMULATIVE_INT64 = 4 + + # Floating point cumulative measurement. The value cannot decrease, if + # resets then the start_time should also be reset. Recorded values are + # always >= 0. + CUMULATIVE_DOUBLE = 5 + + # Distribution cumulative measurement. The count and sum cannot decrease, + # if resets then the start_time should also be reset. + CUMULATIVE_DISTRIBUTION = 6 + + # Some frameworks implemented Histograms as a summary of observations + # (usually things like request durations and response sizes). While it also + # provides a total count of observations and a sum of all observed values, + # it calculates configurable percentiles over a sliding time window. This + # is not recommended, since it cannot be aggregated. 
+ SUMMARY = 7 + + _type_map = { + GAUGE_INT64: value.ValueLong, + GAUGE_DOUBLE: value.ValueDouble, + GAUGE_DISTRIBUTION: value.ValueDistribution, + CUMULATIVE_INT64: value.ValueLong, + CUMULATIVE_DOUBLE: value.ValueDouble, + CUMULATIVE_DISTRIBUTION: value.ValueDistribution, + SUMMARY: value.ValueSummary + } + + @classmethod + def to_type_class(cls, metric_descriptor_type): + try: + return cls._type_map[metric_descriptor_type] + except KeyError: + raise ValueError("Unknown MetricDescriptorType value") + + +class MetricDescriptor(object): + """Defines a metric type and its schema. + + This class implements the spec for v1 MetricDescriptors, as of + opencensus-proto release v0.1.0. See opencensus-proto for details: + + https://github.com/census-instrumentation/opencensus-proto/blob/v0.1.0/src/opencensus/proto/metrics/v1/metrics.proto#L59 + + :type name: str + :param name: The metric type, including its DNS name prefix. It must be + unique. + + :type description: str + :param description: A detailed description of the metric, which can be used + in documentation. + + :type unit: str + :param unit: The unit in which the metric value is reported. Follows the + format described by http://unitsofmeasure.org/ucum.html. + + :type type_: int + :param type_: The type of metric. MetricDescriptorType enumerates the valid + options. + + :type label_keys: list(:class: '~opencensus.metrics.label_key.LabelKey') + :param label_keys: The label keys associated with the metric descriptor. 
+ """ + + def __init__(self, name, description, unit, type_, label_keys): + if type_ not in MetricDescriptorType: + raise ValueError("Invalid type") + + if label_keys is None: + raise ValueError("label_keys must not be None") + + if any(key is None for key in label_keys): + raise ValueError("label_keys must not contain null keys") + + self._name = name + self._description = description + self._unit = unit + self._type = type_ + self._label_keys = label_keys + + def __repr__(self): + type_name = MetricDescriptorType.to_type_class(self.type).__name__ + return ('{}(name="{}", description="{}", unit={}, type={})' + .format( + type(self).__name__, + self.name, + self.description, + self.unit, + type_name, + )) + + @property + def name(self): + return self._name + + @property + def description(self): + return self._description + + @property + def unit(self): + return self._unit + + @property + def type(self): + return self._type + + @property + def label_keys(self): + return self._label_keys diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_producer.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_producer.py new file mode 100644 index 0000000000000000000000000000000000000000..a5f53e18d8ead2a35ec43e8175088906d3c8f76e --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/metric_producer.py @@ -0,0 +1,81 @@ +# Copyright 2019, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + + +class MetricProducer(object): + """Produces a set of metrics for export.""" + + def get_metrics(self): + """Get a set of metrics to be exported. + + :rtype: set(:class: `opencensus.metrics.export.metric.Metric`) + :return: A set of metrics to be exported. + """ + raise NotImplementedError # pragma: NO COVER + + +class MetricProducerManager(object): + """Container class for MetricProducers to be used by exporters. + + :type metric_producers: iterable(class: 'MetricProducer') + :param metric_producers: Optional initial metric producers. + """ + + def __init__(self, metric_producers=None): + if metric_producers is None: + self.metric_producers = set() + else: + self.metric_producers = set(metric_producers) + self.mp_lock = threading.Lock() + + def add(self, metric_producer): + """Add a metric producer. + + :type metric_producer: :class: 'MetricProducer' + :param metric_producer: The metric producer to add. + """ + if metric_producer is None: + raise ValueError + with self.mp_lock: + self.metric_producers.add(metric_producer) + + def remove(self, metric_producer): + """Remove a metric producer. + + :type metric_producer: :class: 'MetricProducer' + :param metric_producer: The metric producer to remove. + """ + if metric_producer is None: + raise ValueError + try: + with self.mp_lock: + self.metric_producers.remove(metric_producer) + except KeyError: + pass + + def get_all(self): + """Get the set of all metric producers. + + Get a copy of `metric_producers`. Prefer this method to using the + attribute directly to avoid other threads adding/removing producers + while you're reading it. + + :rtype: set(:class: `MetricProducer`) + :return: A set of all metric producers at the time of the call. 
+ """ + with self.mp_lock: + mps_copy = set(self.metric_producers) + return mps_copy diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/point.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/point.py new file mode 100644 index 0000000000000000000000000000000000000000..0168f211337d33ed43385d77f4d3ab01a9308643 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/point.py @@ -0,0 +1,47 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class Point(object): + """A timestamped measurement of a TimeSeries. + + :type value: :class:`opencensus.metrics.export.value.ValueDouble` or + :class:`opencensus.metrics.export.value.ValueLong` or + :class:`opencensus.metrics.export.value.ValueSummary` or + :class:`opencensus.metrics.export.value.ValueDistribution` + :param value: the point value. + + :type timestamp: time + :param timestamp: the timestamp when the `Point` was recorded. 
+ """ + + def __init__(self, value, timestamp): + self._value = value + self._timestamp = timestamp + + @property + def value(self): + return self._value + + @property + def timestamp(self): + return self._timestamp + + def __repr__(self): + return ("{}(value={}, timestamp={})" + .format( + type(self).__name__, + self.value, + self.timestamp + )) diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/summary.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/summary.py new file mode 100644 index 0000000000000000000000000000000000000000..d7fddcbf20b53107ba93431cb5205ee7817430e4 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/summary.py @@ -0,0 +1,144 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class Summary(object): + """Implementation of the Summary as a summary of observations. + + :type count: long + :param count: the count of the population values. + + :type sum_data: float + :param sum_data: the sum of the population values. + + :type snapshot: Snapshot + :param snapshot: the values calculated over a sliding time window. 
+ """ + + def __init__(self, count, sum_data, snapshot): + check_count_and_sum(count, sum_data) + self._count = count + self._sum_data = sum_data + + if snapshot is None: + raise ValueError('snapshot must not be none') + + self._snapshot = snapshot + + @property + def count(self): + """Returns the count of the population values""" + return self._count + + @property + def sum_data(self): + """Returns the sum of the population values.""" + return self._sum_data + + @property + def snapshot(self): + """Returns the values calculated over a sliding time window.""" + return self._snapshot + + +class Snapshot(object): + """Represents the summary observation of the recorded events over a + sliding time window. + + :type count: long + :param count: the number of values in the snapshot. + + :type sum_data: float + :param sum_data: the sum of values in the snapshot. + + :type value_at_percentiles: ValueAtPercentile + :param value_at_percentiles: a list of values at different percentiles + of the distribution calculated from the current snapshot. The percentiles + must be strictly increasing. + """ + + def __init__(self, count, sum_data, value_at_percentiles=None): + check_count_and_sum(count, sum_data) + self._count = count + self._sum_data = sum_data + + if value_at_percentiles is None: + value_at_percentiles = [] + + if not isinstance(value_at_percentiles, list): + raise ValueError('value_at_percentiles must be an ' + 'instance of list') + + self._value_at_percentiles = value_at_percentiles + + @property + def count(self): + """Returns the number of values in the snapshot""" + return self._count + + @property + def sum_data(self): + """Returns the sum of values in the snapshot.""" + return self._sum_data + + @property + def value_at_percentiles(self): + """Returns a list of values at different percentiles + of the distribution calculated from the current snapshot. 
+ """ + return self._value_at_percentiles + + +class ValueAtPercentile(object): + """Represents the value at a given percentile of a distribution. + + :type percentile: float + :param percentile: the percentile in the ValueAtPercentile. + + :type value: float + :param value: the value in the ValueAtPercentile. + """ + + def __init__(self, percentile, value): + + if not 0 < percentile <= 100.0: + raise ValueError("percentile must be in the interval (0.0, 100.0]") + + self._percentile = percentile + + if value < 0: + raise ValueError('value must be non-negative') + + self._value = value + + @property + def percentile(self): + """Returns the percentile in the ValueAtPercentile""" + return self._percentile + + @property + def value(self): + """Returns the value in the ValueAtPercentile""" + return self._value + + +def check_count_and_sum(count, sum_data): + if not (count is None or count >= 0): + raise ValueError('count must be non-negative') + + if not (sum_data is None or sum_data >= 0): + raise ValueError('sum_data must be non-negative') + + if count == 0 and sum_data != 0: + raise ValueError('sum_data must be 0 if count is 0') diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/time_series.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/time_series.py new file mode 100644 index 0000000000000000000000000000000000000000..637f8a17ffd264963920fff8e965453f9e7ab5c7 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/time_series.py @@ -0,0 +1,90 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class TimeSeries(object): + """Time series data for a given metric and time interval. + + This class implements the spec for v1 TimeSeries structs as of + opencensus-proto release v0.1.0. See opencensus-proto for details: + + https://github.com/census-instrumentation/opencensus-proto/blob/v0.1.0/src/opencensus/proto/metrics/v1/metrics.proto#L132 + + A TimeSeries is a collection of data points that describes the time-varying + values of a metric. + + :type label_values: list(:class: + '~opencensus.metrics.label_value.LabelValue') + :param label_values: The set of label values that uniquely identify this + timeseries. + + :type points: list(:class: '~opencensus.metrics.export.point.Point') + :param points: The data points of this timeseries. + + :type start_timestamp: str + :param start_timestamp: The time when the cumulative value was reset to + zero, must be set for cumulative metrics. 
+ """ # noqa + + def __init__(self, label_values, points, start_timestamp): + if label_values is None: + raise ValueError("label_values must not be None") + if not points: + raise ValueError("points must not be null or empty") + self._label_values = label_values + self._points = points + self._start_timestamp = start_timestamp + + def __repr__(self): + points_repr = '[{}]'.format( + ', '.join(repr(point.value) for point in self.points)) + + lv_repr = tuple(lv.value for lv in self.label_values) + return ('{}({}, label_values={}, start_timestamp={})' + .format( + type(self).__name__, + points_repr, + lv_repr, + self.start_timestamp + )) + + @property + def start_timestamp(self): + return self._start_timestamp + + @property + def label_values(self): + return self._label_values + + @property + def points(self): + return self._points + + def check_points_type(self, type_class): + """Check that each point's value is an instance of `type_class`. + + `type_class` should typically be a Value type, i.e. one that extends + :class: `opencensus.metrics.export.value.Value`. + + :type type_class: type + :param type_class: Type to check against. + + :rtype: bool + :return: Whether all points are instances of `type_class`. + """ + for point in self.points: + if (point.value is not None + and not isinstance(point.value, type_class)): + return False + return True diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/export/value.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/value.py new file mode 100644 index 0000000000000000000000000000000000000000..e3aa66c81c48c235aae199ca73eb291b1364822f --- /dev/null +++ b/.venv/lib/python3.11/site-packages/opencensus/metrics/export/value.py @@ -0,0 +1,307 @@ +# Copyright 2018, OpenCensus Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +The classes in this module implement the spec for v1 Metrics as of +opencensus-proto release v0.1.0. See opencensus-proto for details: + +https://github.com/census-instrumentation/opencensus-proto/blob/v0.1.0/src/opencensus/proto/metrics/v1/metrics.proto +""" # noqa + +from copy import copy + + +class ValueDouble(object): + """A 64-bit double-precision floating-point number. + + :type value: float + :param value: the value in float. + """ + + def __init__(self, value): + self._value = value + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value, + )) + + @property + def value(self): + return self._value + + +class ValueLong(object): + """A 64-bit integer. + + :type value: long + :param value: the value in long. + """ + + def __init__(self, value): + self._value = value + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value, + )) + + @property + def value(self): + return self._value + + +class ValueSummary(object): + """Represents a snapshot values calculated over an arbitrary time window. + + :type value: summary + :param value: the value in summary. + """ + + def __init__(self, value): + self._value = value + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value, + )) + + @property + def value(self): + return self._value + + +class Exemplar(object): + """An example point to annotate a given value in a bucket. + + Exemplars are example points that may be used to annotate aggregated + Distribution values. 
They are metadata that gives information about a + particular value added to a Distribution bucket. + + :type value: double + :param value: Value of the exemplar point, determines which bucket the + exemplar belongs to. + + :type timestamp: str + :param timestamp: The observation (sampling) time of the exemplar value. + + :type attachments: dict(str, str) + :param attachments: Contextual information about the example value. + """ + + def __init__(self, value, timestamp, attachments): + self._value = value + self._timestamp = timestamp + self._attachments = attachments + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.value, + )) + + @property + def value(self): + return self._value + + @property + def timestamp(self): + return self._timestamp + + @property + def attachments(self): + return self._attachments + + +class Bucket(object): + """A bucket of a histogram. + + :type count: int + :param count: The number of values in each bucket of the histogram. + + :type exemplar: Exemplar + :param exemplar: Optional exemplar for this bucket, omit if the + distribution does not have a histogram. + """ + + def __init__(self, count, exemplar=None): + self._count = count + self._exemplar = exemplar + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.count, + )) + + @property + def count(self): + return self._count + + @property + def exemplar(self): + return self._exemplar + + +class Explicit(object): + """Set of explicit bucket boundaries. + + Specifies a set of buckets with arbitrary upper-bounds. This defines + size(bounds) + 1 (= N) buckets. 
The boundaries for bucket index i are: + + - [0, bounds[i]) for i == 0 + - [bounds[i-1], bounds[i]) for 0 < i < N-1 + - [bounds[i-1], +infinity) for i == N-1 + """ + + def __init__(self, bounds): + if not bounds: + raise ValueError("Bounds must not be null or empty") + if bounds != sorted(set(bounds)): + raise ValueError("Bounds must be strictly increasing") + if bounds[0] <= 0: + raise ValueError("Bounds must be positive") + self._bounds = bounds + + @property + def bounds(self): + return copy(self._bounds) + + +class BucketOptions(object): + """Container for bucket options, including explicit boundaries. + + A Distribution may optionally contain a histogram of the values in the + population. The bucket boundaries for that histogram are described by + BucketOptions. + + If bucket_options has no type, then there is no histogram associated with + the Distribution. + """ + + def __init__(self, type_=None): + self._type = type_ + + def __repr__(self): + return ("{}({})" + .format( + type(self).__name__, + self.type_, + )) + + @property + def type_(self): + return self._type + + +class ValueDistribution(object): + """Summary statistics for a population of values. + + Distribution contains summary statistics for a population of values. It + optionally contains a histogram representing the distribution of those + values across a set of buckets. + + :type count: int + :param count: The number of values in the population. + + :type sum_: float + :param sum_: The sum of the values in the population. + + :type sum_of_squared_deviation: float + :param sum_of_squared_deviation: The sum of squared deviations from the + mean of the values in the population. + + :type bucket_options: :class: 'BucketOptions' + :param bucket_options: Bucket boundaries for the histogram of the values in + the population. + + :type buckets: list(:class: 'Bucket') + :param buckets: Histogram buckets for the given bucket boundaries. 
+ """ + + def __init__(self, + count, + sum_, + sum_of_squared_deviation, + bucket_options, + buckets=None): + if count < 0: + raise ValueError("count must be non-negative") + elif count == 0: + if sum_ != 0: + raise ValueError("sum_ must be 0 if count is 0") + if sum_of_squared_deviation != 0: + raise ValueError("sum_of_squared_deviation must be 0 if count " + "is 0") + if bucket_options is None: + raise ValueError("bucket_options must not be null") + if bucket_options.type_ is None: + if buckets is not None: + raise ValueError("buckets must be null if the distribution " + "has no histogram (i.e. bucket_options.type " + "is null)") + else: + if len(buckets) != len(bucket_options.type_.bounds) + 1: + # Note that this includes the implicit 0 and positive-infinity + # boundaries, so bounds [1, 2] implies three buckets: [[0, 1), + # [1, 2), [2, inf)]. + raise ValueError("There must be one bucket for each pair of " + "boundaries") + if count != sum(bucket.count for bucket in buckets): + raise ValueError("The distribution count must equal the sum " + "of bucket counts") + self._count = count + self._sum = sum_ + self._sum_of_squared_deviation = sum_of_squared_deviation + self._bucket_options = bucket_options + self._buckets = buckets + + def __repr__(self): + try: + bounds = self.bucket_options.type_.bounds, + except AttributeError: + bounds = None + + return ("{}({})" + .format( + type(self).__name__, + bounds + )) + + @property + def count(self): + return self._count + + @property + def sum(self): + return self._sum + + @property + def sum_of_squared_deviation(self): + return self._sum_of_squared_deviation + + @property + def bucket_options(self): + return self._bucket_options + + @property + def buckets(self): + return self._buckets diff --git a/.venv/lib/python3.11/site-packages/opencensus/metrics/label_key.py b/.venv/lib/python3.11/site-packages/opencensus/metrics/label_key.py new file mode 100644 index 
class LabelKey(object):
    """A label key associated with a metric descriptor.

    :type key: str
    :param key: the key for the label

    :type description: str
    :param description: description of the label
    """

    def __init__(self, key, description):
        self._key = key
        self._description = description

    def __repr__(self):
        cls_name = type(self).__name__
        if not self.description:
            # No (or empty) description: render the key alone.
            return "{}({})".format(cls_name, self.key)
        return '{}({}, description="{}")'.format(
            cls_name, self.key, self.description)

    @property
    def key(self):
        """the key for the label"""
        return self._key

    @property
    def description(self):
        """a human-readable description of what this label key represents"""
        return self._description
class PeriodicMetricTask(PeriodicTask):
    """Thread that periodically calls a given function.

    Exceptions raised by the function are logged rather than propagated;
    a :class:`TransportError` additionally cancels the task.

    :type interval: int or float
    :param interval: Seconds between calls to the function. Defaults to
        ``DEFAULT_INTERVAL`` when None.

    :type function: function
    :param function: The function to call.

    :type args: list
    :param args: The args passed in while calling `function`.

    :type kwargs: dict
    :param kwargs: The kwargs passed in while calling `function`.

    :type name: str
    :param name: The source of the worker. Used for naming.
    """

    daemon = True

    def __init__(
        self,
        interval=None,
        function=None,
        args=None,
        kwargs=None,
        name=None
    ):
        if interval is None:
            interval = DEFAULT_INTERVAL

        self.func = function
        # Fix: normalize to concrete containers. `close` unpacks these with
        # *self.args / **self.kwargs, which raised TypeError when they were
        # left as None (the common case, e.g. via get_exporter_thread).
        self.args = args if args is not None else ()
        self.kwargs = kwargs if kwargs is not None else {}

        def func(*aa, **kw):
            # Shield the periodic loop from exceptions raised by the
            # exporter function: log and keep going, except for
            # TransportError, which means the transport is permanently
            # unavailable and the task should stop.
            try:
                return self.func(*aa, **kw)
            except TransportError as ex:
                logger.exception(ex)
                self.cancel()
            except Exception as ex:
                logger.exception("Error handling metric export: {}".format(ex))

        super(PeriodicMetricTask, self).__init__(
            interval, func, self.args, self.kwargs, '{} Worker'.format(name)
        )

    def run(self):
        # Indicate that this thread is an exporter thread.
        # Used to suppress tracking of requests in this thread.
        execution_context.set_is_exporter(True)
        super(PeriodicMetricTask, self).run()

    def close(self):
        """Flush metrics one final time, then cancel the task."""
        try:
            # Suppress request tracking on flush
            execution_context.set_is_exporter(True)
            self.func(*self.args, **self.kwargs)
            execution_context.set_is_exporter(False)
        except Exception as ex:
            logger.exception("Error handling metric flush: {}".format(ex))
        self.cancel()
def get_exporter_thread(metric_producers, exporter, interval=None):
    """Get a running task that periodically exports metrics.

    Get a `PeriodicTask` that periodically calls:

        export(itertools.chain(*all_gets))

    where all_gets is the concatenation of all metrics produced by the metric
    producers in metric_producers, each calling metric_producer.get_metrics()

    :type metric_producers:
        list(:class:`opencensus.metrics.export.metric_producer.MetricProducer`)
    :param metric_producers: The list of metric producers to use to get metrics

    :type exporter: :class:`opencensus.stats.base_exporter.MetricsExporter`
    :param exporter: The exporter to use to export metrics.

    :type interval: int or float
    :param interval: Seconds between export calls.

    :rtype: :class:`PeriodicTask`
    :return: A running thread responsible for calling the exporter.
    """
    # Hold only weak references so this background task does not keep the
    # producers or the exporter alive after their owners drop them.
    weak_gets = [utils.get_weakref(producer.get_metrics)
                 for producer in metric_producers]
    weak_export = utils.get_weakref(exporter.export_metrics)

    def export_all():
        all_gets = []
        for weak_get in weak_gets:
            get = weak_get()
            if get is None:
                # A producer was garbage collected; raising TransportError
                # makes PeriodicMetricTask cancel itself.
                raise TransportError("Metric producer is not available")
            all_gets.append(get())
        export = weak_export()
        if export is None:
            # Likewise, stop exporting once the exporter itself is gone.
            raise TransportError("Metric exporter is not available")

        export(itertools.chain(*all_gets))

    tt = PeriodicMetricTask(
        interval,
        export_all,
        name=exporter.__class__.__name__
    )
    tt.start()
    return tt
+ +from .enums import Enum +from .fields import Field +from .fields import MapField +from .fields import RepeatedField +from .marshal import Marshal +from .message import Message +from .modules import define_module as module +from .primitives import ProtoType +from .version import __version__ + + +DOUBLE = ProtoType.DOUBLE +FLOAT = ProtoType.FLOAT +INT64 = ProtoType.INT64 +UINT64 = ProtoType.UINT64 +INT32 = ProtoType.INT32 +FIXED64 = ProtoType.FIXED64 +FIXED32 = ProtoType.FIXED32 +BOOL = ProtoType.BOOL +STRING = ProtoType.STRING +MESSAGE = ProtoType.MESSAGE +BYTES = ProtoType.BYTES +UINT32 = ProtoType.UINT32 +ENUM = ProtoType.ENUM +SFIXED32 = ProtoType.SFIXED32 +SFIXED64 = ProtoType.SFIXED64 +SINT32 = ProtoType.SINT32 +SINT64 = ProtoType.SINT64 + + +__all__ = ( + "__version__", + "Enum", + "Field", + "MapField", + "RepeatedField", + "Marshal", + "Message", + "module", + # Expose the types directly. + "DOUBLE", + "FLOAT", + "INT64", + "UINT64", + "INT32", + "FIXED64", + "FIXED32", + "BOOL", + "STRING", + "MESSAGE", + "BYTES", + "UINT32", + "ENUM", + "SFIXED32", + "SFIXED64", + "SINT32", + "SINT64", +) diff --git a/.venv/lib/python3.11/site-packages/proto/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..22c5991fab7797f65318f925e5d399f0b5b03c45 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/__pycache__/datetime_helpers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/__pycache__/datetime_helpers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d08b58276a105c016feaa5d2e9fcf0f0d26d9c79 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/__pycache__/datetime_helpers.cpython-311.pyc differ diff --git 
a/.venv/lib/python3.11/site-packages/proto/__pycache__/enums.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/__pycache__/enums.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34a2d4d59551e11052a1381e346c7c11b08ac69c Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/__pycache__/enums.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/__pycache__/fields.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/__pycache__/fields.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aee1001153efab376fd6c18dc9f716fa15ad194a Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/__pycache__/fields.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/enums.py b/.venv/lib/python3.11/site-packages/proto/enums.py new file mode 100644 index 0000000000000000000000000000000000000000..4073c2a3c677dfa67d76462df1b5e8eea59c751f --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/enums.py @@ -0,0 +1,163 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class ProtoEnumMeta(enum.EnumMeta):
    """A metaclass for building and registering protobuf enums."""

    def __new__(mcls, name, bases, attrs):
        """Build the IntEnum subclass and register its protobuf descriptor.

        Builds an ``EnumDescriptorProto`` from the declared members, records
        it in the per-file descriptor bookkeeping, and registers the new
        class with the package marshal.
        """
        # Do not do any special behavior for `proto.Enum` itself.
        if bases[0] == enum.IntEnum:
            return super().__new__(mcls, name, bases, attrs)

        # Get the essential information about the proto package, and where
        # this component belongs within the file.
        package, marshal = _package_info.compile(name, attrs)

        # Determine the local path of this proto component within the file.
        local_path = tuple(attrs.get("__qualname__", name).split("."))

        # Sanity check: We get the wrong full name if a class is declared
        # inside a function local scope; correct this.
        # Fix: CPython inserts the literal string "<locals>" into
        # __qualname__ for function-local classes; the previous check
        # compared against "" which never occurs in a split qualname,
        # making this fixup dead code.
        if "<locals>" in local_path:
            ix = local_path.index("<locals>")
            local_path = local_path[: ix - 1] + local_path[ix + 1 :]

        # Determine the full name in protocol buffers.
        full_name = ".".join((package,) + local_path).lstrip(".")
        filename = _file_info._FileInfo.proto_file_name(
            attrs.get("__module__", name.lower())
        )

        # Retrieve any enum options.
        # We expect something that looks like an EnumOptions message,
        # either an actual instance or a dict-like representation.
        pb_options = "_pb_options"
        opts = attrs.pop(pb_options, {})
        # This is the only portable way to remove the _pb_options name
        # from the enum attrs.
        # In 3.7 onwards, we can define an _ignore_ attribute and do some
        # mucking around with that.
        if pb_options in attrs._member_names:
            if isinstance(attrs._member_names, list):
                idx = attrs._member_names.index(pb_options)
                attrs._member_names.pop(idx)
            else:  # Python 3.11.0b3
                del attrs._member_names[pb_options]

        # Make the descriptor.
        enum_desc = descriptor_pb2.EnumDescriptorProto(
            name=name,
            # Note: the superclass ctor removes the variants, so get them now.
            # Note: proto3 requires that the first variant value be zero.
            value=sorted(
                (
                    descriptor_pb2.EnumValueDescriptorProto(name=name, number=number)
                    # Minor hack to get all the enum variants out.
                    # Use the `_member_names` property to get only the enum members
                    # See https://github.com/googleapis/proto-plus-python/issues/490
                    for name, number in attrs.items()
                    if name in attrs._member_names and isinstance(number, int)
                ),
                key=lambda v: v.number,
            ),
            options=opts,
        )

        file_info = _file_info._FileInfo.maybe_add_descriptor(filename, package)
        if len(local_path) == 1:
            # Top-level enum: attach directly to the file descriptor.
            file_info.descriptor.enum_type.add().MergeFrom(enum_desc)
        else:
            # Nested enum: defer attachment until the enclosing message
            # descriptor is assembled.
            file_info.nested_enum[local_path] = enum_desc

        # Run the superclass constructor.
        cls = super().__new__(mcls, name, bases, attrs)

        # We can't just add a "_meta" element to attrs because the Enum
        # machinery doesn't know what to do with a non-int value.
        # The pb is set later, in generate_file_pb
        cls._meta = _EnumInfo(full_name=full_name, pb=None)

        file_info.enums[full_name] = cls

        # Register the enum with the marshal.
        marshal.register(cls, EnumRule(cls))

        # Generate the descriptor for the file if it is ready.
        if file_info.ready(new_class=cls):
            file_info.generate_file_pb(new_class=cls, fallback_salt=full_name)

        # Done; return the class.
        return cls
+# See the License for the specific language governing permissions and +# limitations under the License. + +from .marshal import Marshal + + +__all__ = ("Marshal",) diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/collections/__init__.py b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4b80a546c26a5cab35ea7eb02a781f3fd70f6d31 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from .maps import MapComposite +from .repeated import Repeated +from .repeated import RepeatedComposite + + +__all__ = ( + "MapComposite", + "Repeated", + "RepeatedComposite", +) diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/maps.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/maps.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f60ac127a6b7099a121abdc7388eeb2044244b64 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/maps.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/repeated.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/repeated.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dad0765d62c14e5495355cae1ecb66ea009db88f Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/collections/__pycache__/repeated.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/collections/maps.py b/.venv/lib/python3.11/site-packages/proto/marshal/collections/maps.py new file mode 100644 index 0000000000000000000000000000000000000000..3c4857161458291b3dc98c5a37d1f0bf4dae4dab --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/collections/maps.py @@ -0,0 +1,82 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
class MapComposite(collections.abc.MutableMapping):
    """A view around a mutable map in protocol buffers.

    This implements the full Python MutableMapping interface, but all methods
    modify the underlying field container directly.
    """

    @cached_property
    def _pb_type(self):
        """Return the protocol buffer type for this sequence."""
        # Huzzah, another hack. Still less bad than RepeatedComposite.
        # NOTE(review): GetEntryClass returns the synthetic map-entry message
        # class; an instantiated entry's `value` field carries the value
        # type. This relies on non-public protobuf behavior — confirm against
        # the pinned protobuf version.
        return type(self.pb.GetEntryClass()().value)

    def __init__(self, sequence, *, marshal):
        """Initialize a wrapper around a protobuf map.

        Args:
            sequence: A protocol buffers map.
            marshal (~.MarshalRegistry): An instantiated marshal, used to
                convert values going to and from this map.
        """
        self._pb = sequence
        self._marshal = marshal

    def __contains__(self, key):
        # Protocol buffers is so permissive that querying for the existence
        # of a key will in of itself create it.
        #
        # By taking a tuple of the keys and querying that, we avoid sending
        # the lookup to protocol buffers and therefore avoid creating the key.
        return key in tuple(self.keys())

    def __getitem__(self, key):
        # We handle raising KeyError ourselves, because otherwise protocol
        # buffers will create the key if it does not exist.
        if key not in self:
            raise KeyError(key)
        # Convert the stored pb value into its Python representation.
        return self._marshal.to_python(self._pb_type, self.pb[key])

    def __setitem__(self, key, value):
        # Convert the incoming value into its pb representation first;
        # strict=True rejects values of the wrong type.
        pb_value = self._marshal.to_proto(self._pb_type, value, strict=True)
        # Directly setting a key is not allowed; however, protocol buffers
        # is so permissive that querying for the existence of a key will in
        # of itself create it.
        #
        # Therefore, we create a key that way (clearing any fields that may
        # be set) and then merge in our values.
        self.pb[key].Clear()
        self.pb[key].MergeFrom(pb_value)

    def __delitem__(self, key):
        # The pb map container exposes deletion via pop().
        self.pb.pop(key)

    def __len__(self):
        return len(self.pb)

    def __iter__(self):
        return iter(self.pb)

    @property
    def pb(self):
        # The underlying protobuf map container.
        return self._pb
class RepeatedComposite(Repeated):
    """A view around a mutable sequence of messages in protocol buffers.

    This implements the full Python MutableSequence interface, but all methods
    modify the underlying field container directly.
    """

    @cached_property
    def _pb_type(self):
        """Return the protocol buffer type for this sequence."""
        # Provide the marshal-given proto_type, if any.
        # Used for RepeatedComposite of Enum.
        if self._proto_type is not None:
            return self._proto_type

        # There is no public-interface mechanism to determine the type
        # of what should go in the list (and the C implementation seems to
        # have no exposed mechanism at all).
        #
        # If the list has members, use the existing list members to
        # determine the type.
        if len(self.pb) > 0:
            return type(self.pb[0])

        # We have no members in the list, so we get the type from the attributes.
        # NOTE(review): these are private protobuf implementation attributes;
        # their presence varies across protobuf versions, hence the hasattr
        # guard and the fallback below.
        if hasattr(self.pb, "_message_descriptor") and hasattr(
            self.pb._message_descriptor, "_concrete_class"
        ):
            return self.pb._message_descriptor._concrete_class

        # Fallback logic in case attributes are not available
        # In order to get the type, we create a throw-away copy and add a
        # blank member to it.
        canary = copy.deepcopy(self.pb).add()
        return type(canary)

    def __eq__(self, other):
        # First try the base-class comparison of raw pb members (covers
        # other Repeated views); otherwise compare the marshaled Python
        # values against any iterable.
        if super().__eq__(other):
            return True
        return (
            tuple([i for i in self]) == tuple(other)
            if isinstance(other, Iterable)
            else False
        )

    def __getitem__(self, key):
        # Marshal the raw pb value into its Python representation on read.
        return self._marshal.to_python(self._pb_type, self.pb[key])

    def __setitem__(self, key, value):
        # The underlying protocol buffer does not define __setitem__, so we
        # have to implement all the operations on our own.

        # If ``key`` is an integer, as in list[index] = value:
        if isinstance(key, int):
            if -len(self) <= key < len(self):
                self.pop(key)  # Delete the old item.
                self.insert(key, value)  # Insert the new item in its place.
            else:
                raise IndexError("list assignment index out of range")

        # If ``key`` is a slice object, as in list[start:stop:step] = [values]:
        elif isinstance(key, slice):
            start, stop, step = key.indices(len(self))

            if not isinstance(value, collections.abc.Iterable):
                raise TypeError("can only assign an iterable")

            if step == 1:  # Is not an extended slice.
                # Assign all the new values to the sliced part, replacing the
                # old values, if any, and unconditionally inserting those
                # values whose indices already exceed the slice length.
                for index, item in enumerate(value):
                    if start + index < stop:
                        self.pop(start + index)
                    self.insert(start + index, item)

                # If there are less values than the length of the slice, remove
                # the remaining elements so that the slice adapts to the
                # newly provided values.
                for _ in range(stop - start - len(value)):
                    self.pop(start + len(value))

            else:  # Is an extended slice.
                indices = range(start, stop, step)

                if len(value) != len(indices):  # XXX: Use PEP 572 on 3.8+
                    raise ValueError(
                        f"attempt to assign sequence of size "
                        f"{len(value)} to extended slice of size "
                        f"{len(indices)}"
                    )

                # Assign each value to its index, calling this function again
                # with individual integer indexes that get processed above.
                for index, item in zip(indices, value):
                    self[index] = item

        else:
            raise TypeError(
                f"list indices must be integers or slices, not {type(key).__name__}"
            )

    def insert(self, index: int, value):
        """Insert ``value`` in the sequence before ``index``."""
        # Marshal to the pb representation before handing to the container.
        pb_value = self._marshal.to_proto(self._pb_type, value)
        self.pb.insert(index, pb_value)
+ +from google.protobuf.internal import containers + +# Import all message types to ensure that pyext types are recognized +# when upb types exist. Conda's protobuf defaults to pyext despite upb existing. +# See https://github.com/googleapis/proto-plus-python/issues/470 +try: + from google._upb import _message as _message_upb +except ImportError: + _message_upb = None + +try: + from google.protobuf.pyext import _message as _message_pyext +except ImportError: + _message_pyext = None + + +repeated_composite_types = (containers.RepeatedCompositeFieldContainer,) +repeated_scalar_types = (containers.RepeatedScalarFieldContainer,) +map_composite_types = (containers.MessageMap,) + +# In `proto/marshal.py`, for compatibility with protobuf 5.x, +# we'll use `map_composite_type_names` to check whether +# the name of the class of a protobuf type is +# `MessageMapContainer`, and, if `True`, return a MapComposite. +# See https://github.com/protocolbuffers/protobuf/issues/16596 +map_composite_type_names = ("MessageMapContainer",) + +for message in [_message_upb, _message_pyext]: + if message: + repeated_composite_types += (message.RepeatedCompositeContainer,) + repeated_scalar_types += (message.RepeatedScalarContainer,) + + try: + map_composite_types += (message.MessageMapContainer,) + except AttributeError: + # The `MessageMapContainer` attribute is not available in Protobuf 5.x+ + pass + +__all__ = ( + "repeated_composite_types", + "repeated_scalar_types", + "map_composite_types", + "map_composite_type_names", +) diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/marshal.py b/.venv/lib/python3.11/site-packages/proto/marshal/marshal.py new file mode 100644 index 0000000000000000000000000000000000000000..d278421a57975c7fd12e26e37cb5b83f1f7e222b --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/marshal.py @@ -0,0 +1,297 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
class Rule(abc.ABC):
    """Abstract base class for marshal rules.

    Any object that provides both ``to_python`` and ``to_proto`` methods is
    treated as a (virtual) subclass through the subclass hook below, so
    rules never need to inherit from this class explicitly.
    """

    @classmethod
    def __subclasshook__(cls, C):
        # Duck-typed membership: both conversion methods must be present.
        required = ("to_python", "to_proto")
        if all(hasattr(C, attr) for attr in required):
            return True
        return NotImplemented
+ + The protocol buffer class is always the "key" in this relationship; when + presenting a message, the declared field types are used to determine + whether a value should be transformed into another class. Similarly, + when accepting a Python value (when setting a field, for example), + the declared field type is still used. This means that, if appropriate, + multiple protocol buffer types may use the same Python type. + + The primary implementation of this is :class:`Marshal`, which should + usually be used instead of this class directly. + """ + + def __init__(self): + self._rules = {} + self._noop = NoopRule() + self.reset() + + def register(self, proto_type: type, rule: Rule = None): + """Register a rule against the given ``proto_type``. + + This function expects a ``proto_type`` (the descriptor class) and + a ``rule``; an object with a ``to_python`` and ``to_proto`` method. + Each method should return the appropriate Python or protocol buffer + type, and be idempotent (e.g. accept either type as input). + + This function can also be used as a decorator:: + + @marshal.register(timestamp_pb2.Timestamp) + class TimestampRule: + ... + + In this case, the class will be initialized for you with zero + arguments. + + Args: + proto_type (type): A protocol buffer message type. + rule: A marshal object + """ + # If a rule was provided, register it and be done. + if rule: + # Ensure the rule implements Rule. + if not isinstance(rule, Rule): + raise TypeError( + "Marshal rule instances must implement " + "`to_proto` and `to_python` methods." + ) + + # Register the rule. + self._rules[proto_type] = rule + return + + # Create an inner function that will register an instance of the + # marshal class to this object's registry, and return it. + def register_rule_class(rule_class: type): + # Ensure the rule class is a valid rule. + if not issubclass(rule_class, Rule): + raise TypeError( + "Marshal rule subclasses must implement " + "`to_proto` and `to_python` methods." 
+ ) + + # Register the rule class. + self._rules[proto_type] = rule_class() + return rule_class + + return register_rule_class + + def reset(self): + """Reset the registry to its initial state.""" + self._rules.clear() + + # Register date and time wrappers. + self.register(timestamp_pb2.Timestamp, dates.TimestampRule()) + self.register(duration_pb2.Duration, dates.DurationRule()) + + # Register FieldMask wrappers. + self.register(field_mask_pb2.FieldMask, field_mask.FieldMaskRule()) + + # Register nullable primitive wrappers. + self.register(wrappers_pb2.BoolValue, wrappers.BoolValueRule()) + self.register(wrappers_pb2.BytesValue, wrappers.BytesValueRule()) + self.register(wrappers_pb2.DoubleValue, wrappers.DoubleValueRule()) + self.register(wrappers_pb2.FloatValue, wrappers.FloatValueRule()) + self.register(wrappers_pb2.Int32Value, wrappers.Int32ValueRule()) + self.register(wrappers_pb2.Int64Value, wrappers.Int64ValueRule()) + self.register(wrappers_pb2.StringValue, wrappers.StringValueRule()) + self.register(wrappers_pb2.UInt32Value, wrappers.UInt32ValueRule()) + self.register(wrappers_pb2.UInt64Value, wrappers.UInt64ValueRule()) + + # Register the google.protobuf.Struct wrappers. + # + # These are aware of the marshal that created them, because they + # create RepeatedComposite and MapComposite instances directly and + # need to pass the marshal to them. + self.register(struct_pb2.Value, struct.ValueRule(marshal=self)) + self.register(struct_pb2.ListValue, struct.ListValueRule(marshal=self)) + self.register(struct_pb2.Struct, struct.StructRule(marshal=self)) + + # Special case for bytes to allow base64 encode/decode + self.register(ProtoType.BYTES, pb_bytes.BytesRule()) + + # Special case for int64 from strings because of dict round trip. 
+ # See https://github.com/protocolbuffers/protobuf/issues/2679 + for rule_class in stringy_numbers.STRINGY_NUMBER_RULES: + self.register(rule_class._proto_type, rule_class()) + + def get_rule(self, proto_type): + # Rules are needed to convert values between proto-plus and pb. + # Retrieve the rule for the specified proto type. + # The NoopRule will be used when a rule is not found. + rule = self._rules.get(proto_type, self._noop) + + # If we don't find a rule, also check under `_instances` + # in case there is a rule in another package. + # See https://github.com/googleapis/proto-plus-python/issues/349 + if rule == self._noop and hasattr(self, "_instances"): + for _, instance in self._instances.items(): + rule = instance._rules.get(proto_type, self._noop) + if rule != self._noop: + break + return rule + + def to_python(self, proto_type, value, *, absent: bool = None): + # Internal protobuf has its own special type for lists of values. + # Return a view around it that implements MutableSequence. + value_type = type(value) # Minor performance boost over isinstance + if value_type in compat.repeated_composite_types: + return RepeatedComposite(value, marshal=self) + if value_type in compat.repeated_scalar_types: + if isinstance(proto_type, type): + return RepeatedComposite(value, marshal=self, proto_type=proto_type) + else: + return Repeated(value, marshal=self) + + # Same thing for maps of messages. + # See https://github.com/protocolbuffers/protobuf/issues/16596 + # We need to look up the name of the type in compat.map_composite_type_names + # as class `MessageMapContainer` is no longer exposed + # This is done to avoid taking a breaking change in proto-plus. 
+ if ( + value_type in compat.map_composite_types + or value_type.__name__ in compat.map_composite_type_names + ): + return MapComposite(value, marshal=self) + return self.get_rule(proto_type=proto_type).to_python(value, absent=absent) + + def to_proto(self, proto_type, value, *, strict: bool = False): + # The protos in google/protobuf/struct.proto are exceptional cases, + # because they can and should represent themselves as lists and dicts. + # These cases are handled in their rule classes. + if proto_type not in ( + struct_pb2.Value, + struct_pb2.ListValue, + struct_pb2.Struct, + ): + # For our repeated and map view objects, simply return the + # underlying pb. + if isinstance(value, (Repeated, MapComposite)): + return value.pb + + # Convert lists and tuples recursively. + if isinstance(value, (list, tuple)): + return type(value)(self.to_proto(proto_type, i) for i in value) + + # Convert dictionaries recursively when the proto type is a map. + # This is slightly more complicated than converting a list or tuple + # because we have to step through the magic that protocol buffers does. + # + # Essentially, a type of map will show up here as + # a FoosEntry with a `key` field, `value` field, and a `map_entry` + # annotation. We need to do the conversion based on the `value` + # field's type. + if isinstance(value, dict) and ( + proto_type.DESCRIPTOR.has_options + and proto_type.DESCRIPTOR.GetOptions().map_entry + ): + recursive_type = type(proto_type().value) + return {k: self.to_proto(recursive_type, v) for k, v in value.items()} + + pb_value = self.get_rule(proto_type=proto_type).to_proto(value) + + # Sanity check: If we are in strict mode, did we get the value we want? + if strict and not isinstance(pb_value, proto_type): + raise TypeError( + "Parameter must be instance of the same class; " + "expected {expected}, got {got}".format( + expected=proto_type.__name__, + got=pb_value.__class__.__name__, + ), + ) + # Return the final value. 
+ return pb_value + + +class Marshal(BaseMarshal): + """The translator between protocol buffer and Python instances. + + The bulk of the implementation is in :class:`BaseMarshal`. This class + adds identity tracking: multiple instantiations of :class:`Marshal` with + the same name will provide the same instance. + """ + + _instances = {} + + def __new__(cls, *, name: str): + """Create a marshal instance. + + Args: + name (str): The name of the marshal. Instantiating multiple + marshals with the same ``name`` argument will provide the + same marshal each time. + """ + klass = cls._instances.get(name) + if klass is None: + klass = cls._instances[name] = super().__new__(cls) + + return klass + + def __init__(self, *, name: str): + """Instantiate a marshal. + + Args: + name (str): The name of the marshal. Instantiating multiple + marshals with the same ``name`` argument will provide the + same marshal each time. + """ + self._name = name + if not hasattr(self, "_rules"): + super().__init__() + + +class NoopRule: + """A catch-all rule that does nothing.""" + + def to_python(self, pb_value, *, absent: bool = None): + return pb_value + + def to_proto(self, value): + return value + + +__all__ = ("Marshal",) diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__init__.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b0c7da3d7725b221298f8a38dadf11d4802dce0d --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..869300b5a3574366f44cc56d6e1d0e37a316d23b Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/bytes.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/bytes.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ac79acbc8fece7941f3755ae85c1696d58436a8a Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/bytes.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/dates.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/dates.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a1d614cac8efc6ee41e6a81809363bc5aea3221b Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/dates.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/enums.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/enums.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..b07921bf7c66305caf9645ef577cd7c398c9fd1b Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/enums.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/field_mask.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/field_mask.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4e7195199930e0d4b6f862bd449153053369418 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/field_mask.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/message.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/message.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..77fa4fde2ab73470ea5e1b4f0a135b2b7e70f763 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/message.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/stringy_numbers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/stringy_numbers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..350930ea084590aeb2f9af618583a73c89cbd626 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/stringy_numbers.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/struct.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/struct.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bc7a1ebfa3cb868723d8c0d99112307d91d8dcb2 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/struct.cpython-311.pyc differ diff --git 
a/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/wrappers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/wrappers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4390c729e2562343573658fd7d399b999868313a Binary files /dev/null and b/.venv/lib/python3.11/site-packages/proto/marshal/rules/__pycache__/wrappers.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/bytes.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/bytes.py new file mode 100644 index 0000000000000000000000000000000000000000..080b0a03d4a028705ebd67978310e32c99ec740c --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/bytes.py @@ -0,0 +1,44 @@ +# Copyright (C) 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import base64 + + +class BytesRule: + """A marshal between Python strings and protobuf bytes. + + Note: this conversion is asymmetric because Python does have a bytes type. + It is sometimes necessary to convert proto bytes fields to strings, e.g. for + JSON encoding, marshalling a message to a dict. Because bytes fields can + represent arbitrary data, bytes fields are base64 encoded when they need to + be represented as strings. + + It is necessary to have the conversion be bidirectional, i.e. 
+ my_message == MyMessage(MyMessage.to_dict(my_message)) + + To accomplish this, we need to intercept assignments from strings and + base64 decode them back into bytes. + """ + + def to_python(self, value, *, absent: bool = None): + return value + + def to_proto(self, value): + if isinstance(value, str): + value = value.encode("utf-8") + value += b"=" * (4 - len(value) % 4) # padding + value = base64.urlsafe_b64decode(value) + + return value diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/dates.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/dates.py new file mode 100644 index 0000000000000000000000000000000000000000..33d12829b3cc562ab463347a6ea76b5d28b55dd4 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/dates.py @@ -0,0 +1,85 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from datetime import datetime +from datetime import timedelta +from datetime import timezone + +from google.protobuf import duration_pb2 +from google.protobuf import timestamp_pb2 +from proto import datetime_helpers, utils + + +class TimestampRule: + """A marshal between Python datetimes and protobuf timestamps. + + Note: Python datetimes are less precise than protobuf datetimes + (microsecond vs. nanosecond level precision). If nanosecond-level + precision matters, it is recommended to interact with the internal + proto directly. 
+ """ + + def to_python( + self, value, *, absent: bool = None + ) -> datetime_helpers.DatetimeWithNanoseconds: + if isinstance(value, timestamp_pb2.Timestamp): + if absent: + return None + return datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(value) + return value + + def to_proto(self, value) -> timestamp_pb2.Timestamp: + if isinstance(value, datetime_helpers.DatetimeWithNanoseconds): + return value.timestamp_pb() + if isinstance(value, datetime): + return timestamp_pb2.Timestamp( + seconds=int(value.timestamp()), + nanos=value.microsecond * 1000, + ) + if isinstance(value, str): + timestamp_value = timestamp_pb2.Timestamp() + timestamp_value.FromJsonString(value=value) + return timestamp_value + return value + + +class DurationRule: + """A marshal between Python timedeltas and protobuf durations. + + Note: Python timedeltas are less precise than protobuf durations + (microsecond vs. nanosecond level precision). If nanosecond-level + precision matters, it is recommended to interact with the internal + proto directly. 
+ """ + + def to_python(self, value, *, absent: bool = None) -> timedelta: + if isinstance(value, duration_pb2.Duration): + return timedelta( + days=value.seconds // 86400, + seconds=value.seconds % 86400, + microseconds=value.nanos // 1000, + ) + return value + + def to_proto(self, value) -> duration_pb2.Duration: + if isinstance(value, timedelta): + return duration_pb2.Duration( + seconds=value.days * 86400 + value.seconds, + nanos=value.microseconds * 1000, + ) + if isinstance(value, str): + duration_value = duration_pb2.Duration() + duration_value.FromJsonString(value=value) + return duration_value + return value diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/enums.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/enums.py new file mode 100644 index 0000000000000000000000000000000000000000..9cfc312764b6ab8ff38ebc12dd256d6b1434d6c2 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/enums.py @@ -0,0 +1,59 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Type +import enum +import warnings + + +class EnumRule: + """A marshal for converting between integer values and enum values.""" + + def __init__(self, enum_class: Type[enum.IntEnum]): + self._enum = enum_class + + def to_python(self, value, *, absent: bool = None): + if isinstance(value, int) and not isinstance(value, self._enum): + try: + # Coerce the int on the wire to the enum value. 
+ return self._enum(value) + except ValueError: + # Since it is possible to add values to enums, we do + # not want to flatly error on this. + # + # However, it is useful to make some noise about it so + # the user realizes that an unexpected value came along. + warnings.warn( + "Unrecognized {name} enum value: {value}".format( + name=self._enum.__name__, + value=value, + ) + ) + return value + + def to_proto(self, value): + # Accept enum values and coerce to the pure integer. + # This is not strictly necessary (protocol buffers can take these + # objects as they subclass int) but nevertheless seems like the + # right thing to do. + if isinstance(value, self._enum): + return value.value + + # If a string is provided that matches an enum value, coerce it + # to the enum value. + if isinstance(value, str): + return self._enum[value].value + + # We got a pure integer; pass it on. + return value diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/field_mask.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/field_mask.py new file mode 100644 index 0000000000000000000000000000000000000000..348e7e3995d4e5fe810f87567a1296eb0b6510dd --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/field_mask.py @@ -0,0 +1,36 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.protobuf import field_mask_pb2 + + +class FieldMaskRule: + """A marshal between FieldMask and strings. 
+ + See https://github.com/googleapis/proto-plus-python/issues/333 + and + https://developers.google.com/protocol-buffers/docs/proto3#json + for more details. + """ + + def to_python(self, value, *, absent: bool = None): + return value + + def to_proto(self, value): + if isinstance(value, str): + field_mask_value = field_mask_pb2.FieldMask() + field_mask_value.FromJsonString(value=value) + return field_mask_value + + return value diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/message.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/message.py new file mode 100644 index 0000000000000000000000000000000000000000..479a2d95277631e7a659e5a1bfebd211fb1c22d7 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/message.py @@ -0,0 +1,52 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +class MessageRule: + """A marshal for converting between a descriptor and proto.Message.""" + + def __init__(self, descriptor: type, wrapper: type): + self._descriptor = descriptor + self._wrapper = wrapper + + def to_python(self, value, *, absent: bool = None): + if isinstance(value, self._descriptor): + return self._wrapper.wrap(value) + return value + + def to_proto(self, value): + if isinstance(value, self._wrapper): + return self._wrapper.pb(value) + if isinstance(value, dict) and not self.is_map: + # We need to use the wrapper's marshaling to handle + # potentially problematic nested messages. 
+ try: + # Try the fast path first. + return self._descriptor(**value) + except (TypeError, ValueError) as ex: + # If we have a TypeError or Valueerror, + # try the slow path in case the error + # was: + # - an int64/string issue. + # - a missing key issue in case a key only exists with a `_` suffix. + # See related issue: https://github.com/googleapis/python-api-core/issues/227. + # - a missing key issue due to nested struct. See: b/321905145. + return self._wrapper(value)._pb + return value + + @property + def is_map(self): + """Return True if the descriptor is a map entry, False otherwise.""" + desc = self._descriptor.DESCRIPTOR + return desc.has_options and desc.GetOptions().map_entry diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/stringy_numbers.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/stringy_numbers.py new file mode 100644 index 0000000000000000000000000000000000000000..dae69e9c9121d9b71b2c98a2f2028ca2c9c7d66d --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/stringy_numbers.py @@ -0,0 +1,71 @@ +# Copyright (C) 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from proto.primitives import ProtoType + + +class StringyNumberRule: + """A marshal between certain numeric types and strings + + This is a necessary hack to allow round trip conversion + from messages to dicts back to messages. 
+ + See https://github.com/protocolbuffers/protobuf/issues/2679 + and + https://developers.google.com/protocol-buffers/docs/proto3#json + for more details. + """ + + def to_python(self, value, *, absent: bool = None): + return value + + def to_proto(self, value): + if value is not None: + return self._python_type(value) + + return None + + +class Int64Rule(StringyNumberRule): + _python_type = int + _proto_type = ProtoType.INT64 + + +class UInt64Rule(StringyNumberRule): + _python_type = int + _proto_type = ProtoType.UINT64 + + +class SInt64Rule(StringyNumberRule): + _python_type = int + _proto_type = ProtoType.SINT64 + + +class Fixed64Rule(StringyNumberRule): + _python_type = int + _proto_type = ProtoType.FIXED64 + + +class SFixed64Rule(StringyNumberRule): + _python_type = int + _proto_type = ProtoType.SFIXED64 + + +STRINGY_NUMBER_RULES = [ + Int64Rule, + UInt64Rule, + SInt64Rule, + Fixed64Rule, + SFixed64Rule, +] diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/struct.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/struct.py new file mode 100644 index 0000000000000000000000000000000000000000..0e34587b26b19c06caa63bf469e26c3ef6bb9dda --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/struct.py @@ -0,0 +1,143 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections.abc + +from google.protobuf import struct_pb2 + +from proto.marshal.collections import maps +from proto.marshal.collections import repeated + + +class ValueRule: + """A rule to marshal between google.protobuf.Value and Python values.""" + + def __init__(self, *, marshal): + self._marshal = marshal + + def to_python(self, value, *, absent: bool = None): + """Coerce the given value to the appropriate Python type. + + Note that both NullValue and absent fields return None. + In order to disambiguate between these two options, + use containment check, + E.g. + "value" in foo + which is True for NullValue and False for an absent value. + """ + kind = value.WhichOneof("kind") + if kind == "null_value" or absent: + return None + if kind == "bool_value": + return bool(value.bool_value) + if kind == "number_value": + return float(value.number_value) + if kind == "string_value": + return str(value.string_value) + if kind == "struct_value": + return self._marshal.to_python( + struct_pb2.Struct, + value.struct_value, + absent=False, + ) + if kind == "list_value": + return self._marshal.to_python( + struct_pb2.ListValue, + value.list_value, + absent=False, + ) + # If more variants are ever added, we want to fail loudly + # instead of tacitly returning None. 
+ raise ValueError("Unexpected kind: %s" % kind) # pragma: NO COVER + + def to_proto(self, value) -> struct_pb2.Value: + """Return a protobuf Value object representing this value.""" + if isinstance(value, struct_pb2.Value): + return value + if value is None: + return struct_pb2.Value(null_value=0) + if isinstance(value, bool): + return struct_pb2.Value(bool_value=value) + if isinstance(value, (int, float)): + return struct_pb2.Value(number_value=float(value)) + if isinstance(value, str): + return struct_pb2.Value(string_value=value) + if isinstance(value, collections.abc.Sequence): + return struct_pb2.Value( + list_value=self._marshal.to_proto(struct_pb2.ListValue, value), + ) + if isinstance(value, collections.abc.Mapping): + return struct_pb2.Value( + struct_value=self._marshal.to_proto(struct_pb2.Struct, value), + ) + raise ValueError("Unable to coerce value: %r" % value) + + +class ListValueRule: + """A rule translating google.protobuf.ListValue and list-like objects.""" + + def __init__(self, *, marshal): + self._marshal = marshal + + def to_python(self, value, *, absent: bool = None): + """Coerce the given value to a Python sequence.""" + return ( + None + if absent + else repeated.RepeatedComposite(value.values, marshal=self._marshal) + ) + + def to_proto(self, value) -> struct_pb2.ListValue: + # We got a proto, or else something we sent originally. + # Preserve the instance we have. + if isinstance(value, struct_pb2.ListValue): + return value + if isinstance(value, repeated.RepeatedComposite): + return struct_pb2.ListValue(values=[v for v in value.pb]) + + # We got a list (or something list-like); convert it. 
+ return struct_pb2.ListValue( + values=[self._marshal.to_proto(struct_pb2.Value, v) for v in value] + ) + + +class StructRule: + """A rule translating google.protobuf.Struct and dict-like objects.""" + + def __init__(self, *, marshal): + self._marshal = marshal + + def to_python(self, value, *, absent: bool = None): + """Coerce the given value to a Python mapping.""" + return ( + None if absent else maps.MapComposite(value.fields, marshal=self._marshal) + ) + + def to_proto(self, value) -> struct_pb2.Struct: + # We got a proto, or else something we sent originally. + # Preserve the instance we have. + if isinstance(value, struct_pb2.Struct): + return value + if isinstance(value, maps.MapComposite): + return struct_pb2.Struct( + fields={k: v for k, v in value.pb.items()}, + ) + + # We got a dict (or something dict-like); convert it. + answer = struct_pb2.Struct( + fields={ + k: self._marshal.to_proto(struct_pb2.Value, v) for k, v in value.items() + } + ) + return answer diff --git a/.venv/lib/python3.11/site-packages/proto/marshal/rules/wrappers.py b/.venv/lib/python3.11/site-packages/proto/marshal/rules/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..5bc89e595851e42e70c32cb6d970084c48959d57 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/marshal/rules/wrappers.py @@ -0,0 +1,84 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.protobuf import wrappers_pb2 + + +class WrapperRule: + """A marshal for converting the protobuf wrapper classes to Python. + + This class converts between ``google.protobuf.BoolValue``, + ``google.protobuf.StringValue``, and their siblings to the appropriate + Python equivalents. + + These are effectively similar to the protobuf primitives except + that None becomes a possible value. + """ + + def to_python(self, value, *, absent: bool = None): + if isinstance(value, self._proto_type): + if absent: + return None + return value.value + return value + + def to_proto(self, value): + if isinstance(value, self._python_type): + return self._proto_type(value=value) + return value + + +class DoubleValueRule(WrapperRule): + _proto_type = wrappers_pb2.DoubleValue + _python_type = float + + +class FloatValueRule(WrapperRule): + _proto_type = wrappers_pb2.FloatValue + _python_type = float + + +class Int64ValueRule(WrapperRule): + _proto_type = wrappers_pb2.Int64Value + _python_type = int + + +class UInt64ValueRule(WrapperRule): + _proto_type = wrappers_pb2.UInt64Value + _python_type = int + + +class Int32ValueRule(WrapperRule): + _proto_type = wrappers_pb2.Int32Value + _python_type = int + + +class UInt32ValueRule(WrapperRule): + _proto_type = wrappers_pb2.UInt32Value + _python_type = int + + +class BoolValueRule(WrapperRule): + _proto_type = wrappers_pb2.BoolValue + _python_type = bool + + +class StringValueRule(WrapperRule): + _proto_type = wrappers_pb2.StringValue + _python_type = str + + +class BytesValueRule(WrapperRule): + _proto_type = wrappers_pb2.BytesValue + _python_type = bytes diff --git a/.venv/lib/python3.11/site-packages/proto/primitives.py b/.venv/lib/python3.11/site-packages/proto/primitives.py new file mode 100644 index 0000000000000000000000000000000000000000..cff2094c678bb52e75bf48271ae5c8e1d603310c --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/primitives.py @@ -0,0 +1,38 @@ +# Copyright 2018 Google LLC +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import enum + + +class ProtoType(enum.IntEnum): + """The set of basic types in protocol buffers.""" + + # These values come from google/protobuf/descriptor.proto + DOUBLE = 1 + FLOAT = 2 + INT64 = 3 + UINT64 = 4 + INT32 = 5 + FIXED64 = 6 + FIXED32 = 7 + BOOL = 8 + STRING = 9 + MESSAGE = 11 + BYTES = 12 + UINT32 = 13 + ENUM = 14 + SFIXED32 = 15 + SFIXED64 = 16 + SINT32 = 17 + SINT64 = 18 diff --git a/.venv/lib/python3.11/site-packages/proto/utils.py b/.venv/lib/python3.11/site-packages/proto/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ac3c471a2e8b5e2792175f4aa25220efba422264 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/proto/utils.py @@ -0,0 +1,58 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import functools + + +def has_upb(): + try: + from google._upb import _message # pylint: disable=unused-import + + has_upb = True + except ImportError: + has_upb = False + return has_upb + + +def cached_property(fx): + """Make the callable into a cached property. + + Similar to @property, but the function will only be called once per + object. + + Args: + fx (Callable[]): The property function. + + Returns: + Callable[]: The wrapped function. + """ + + @functools.wraps(fx) + def inner(self): + # Sanity check: If there is no cache at all, create an empty cache. + if not hasattr(self, "_cached_values"): + object.__setattr__(self, "_cached_values", {}) + + # If and only if the function's result is not in the cache, + # run the function. + if fx.__name__ not in self._cached_values: + self._cached_values[fx.__name__] = fx(self) + + # Return the value from cache. + return self._cached_values[fx.__name__] + + return property(inner) + + +__all__ = ("cached_property",) diff --git a/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/class.h b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/class.h new file mode 100644 index 0000000000000000000000000000000000000000..b990507d629b4260d66d51e23a7f34a0fa465c9e --- /dev/null +++ b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/class.h @@ -0,0 +1,767 @@ +/* + pybind11/detail/class.h: Python C API implementation details for py::class_ + + Copyright (c) 2017 Wenzel Jakob + + All rights reserved. Use of this source code is governed by a + BSD-style license that can be found in the LICENSE file. 
+*/ + +#pragma once + +#include +#include + +#include "exception_translation.h" + +PYBIND11_NAMESPACE_BEGIN(PYBIND11_NAMESPACE) +PYBIND11_NAMESPACE_BEGIN(detail) + +#if !defined(PYPY_VERSION) +# define PYBIND11_BUILTIN_QUALNAME +# define PYBIND11_SET_OLDPY_QUALNAME(obj, nameobj) +#else +// In PyPy, we still set __qualname__ so that we can produce reliable function type +// signatures; in CPython this macro expands to nothing: +# define PYBIND11_SET_OLDPY_QUALNAME(obj, nameobj) \ + setattr((PyObject *) obj, "__qualname__", nameobj) +#endif + +inline std::string get_fully_qualified_tp_name(PyTypeObject *type) { +#if !defined(PYPY_VERSION) + return type->tp_name; +#else + auto module_name = handle((PyObject *) type).attr("__module__").cast(); + if (module_name == PYBIND11_BUILTINS_MODULE) + return type->tp_name; + else + return std::move(module_name) + "." + type->tp_name; +#endif +} + +inline PyTypeObject *type_incref(PyTypeObject *type) { + Py_INCREF(type); + return type; +} + +#if !defined(PYPY_VERSION) + +/// `pybind11_static_property.__get__()`: Always pass the class instead of the instance. +extern "C" inline PyObject *pybind11_static_get(PyObject *self, PyObject * /*ob*/, PyObject *cls) { + return PyProperty_Type.tp_descr_get(self, cls, cls); +} + +/// `pybind11_static_property.__set__()`: Just like the above `__get__()`. +extern "C" inline int pybind11_static_set(PyObject *self, PyObject *obj, PyObject *value) { + PyObject *cls = PyType_Check(obj) ? obj : (PyObject *) Py_TYPE(obj); + return PyProperty_Type.tp_descr_set(self, cls, value); +} + +// Forward declaration to use in `make_static_property_type()` +inline void enable_dynamic_attributes(PyHeapTypeObject *heap_type); + +/** A `static_property` is the same as a `property` but the `__get__()` and `__set__()` + methods are modified to always use the object type instead of a concrete instance. + Return value: New reference. 
*/ +inline PyTypeObject *make_static_property_type() { + constexpr auto *name = "pybind11_static_property"; + auto name_obj = reinterpret_steal(PYBIND11_FROM_STRING(name)); + + /* Danger zone: from now (and until PyType_Ready), make sure to + issue no Python C API calls which could potentially invoke the + garbage collector (the GC will call type_traverse(), which will in + turn find the newly constructed type in an invalid state) */ + auto *heap_type = (PyHeapTypeObject *) PyType_Type.tp_alloc(&PyType_Type, 0); + if (!heap_type) { + pybind11_fail("make_static_property_type(): error allocating type!"); + } + + heap_type->ht_name = name_obj.inc_ref().ptr(); +# ifdef PYBIND11_BUILTIN_QUALNAME + heap_type->ht_qualname = name_obj.inc_ref().ptr(); +# endif + + auto *type = &heap_type->ht_type; + type->tp_name = name; + type->tp_base = type_incref(&PyProperty_Type); + type->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HEAPTYPE; + type->tp_descr_get = pybind11_static_get; + type->tp_descr_set = pybind11_static_set; + +# if PY_VERSION_HEX >= 0x030C0000 + // Since Python-3.12 property-derived types are required to + // have dynamic attributes (to set `__doc__`) + enable_dynamic_attributes(heap_type); +# endif + + if (PyType_Ready(type) < 0) { + pybind11_fail("make_static_property_type(): failure in PyType_Ready()!"); + } + + setattr((PyObject *) type, "__module__", str("pybind11_builtins")); + PYBIND11_SET_OLDPY_QUALNAME(type, name_obj); + + return type; +} + +#else // PYPY + +/** PyPy has some issues with the above C API, so we evaluate Python code instead. + This function will only be called once so performance isn't really a concern. + Return value: New reference. 
*/ +inline PyTypeObject *make_static_property_type() { + auto d = dict(); + PyObject *result = PyRun_String(R"(\ +class pybind11_static_property(property): + def __get__(self, obj, cls): + return property.__get__(self, cls, cls) + + def __set__(self, obj, value): + cls = obj if isinstance(obj, type) else type(obj) + property.__set__(self, cls, value) +)", + Py_file_input, + d.ptr(), + d.ptr()); + if (result == nullptr) + throw error_already_set(); + Py_DECREF(result); + return (PyTypeObject *) d["pybind11_static_property"].cast().release().ptr(); +} + +#endif // PYPY + +/** Types with static properties need to handle `Type.static_prop = x` in a specific way. + By default, Python replaces the `static_property` itself, but for wrapped C++ types + we need to call `static_property.__set__()` in order to propagate the new value to + the underlying C++ data structure. */ +extern "C" inline int pybind11_meta_setattro(PyObject *obj, PyObject *name, PyObject *value) { + // Use `_PyType_Lookup()` instead of `PyObject_GetAttr()` in order to get the raw + // descriptor (`property`) instead of calling `tp_descr_get` (`property.__get__()`). + PyObject *descr = _PyType_Lookup((PyTypeObject *) obj, name); + + // The following assignment combinations are possible: + // 1. `Type.static_prop = value` --> descr_set: `Type.static_prop.__set__(value)` + // 2. `Type.static_prop = other_static_prop` --> setattro: replace existing `static_prop` + // 3. `Type.regular_attribute = value` --> setattro: regular attribute assignment + auto *const static_prop = (PyObject *) get_internals().static_property_type; + const auto call_descr_set = (descr != nullptr) && (value != nullptr) + && (PyObject_IsInstance(descr, static_prop) != 0) + && (PyObject_IsInstance(value, static_prop) == 0); + if (call_descr_set) { + // Call `static_property.__set__()` instead of replacing the `static_property`. 
+#if !defined(PYPY_VERSION) + return Py_TYPE(descr)->tp_descr_set(descr, obj, value); +#else + if (PyObject *result = PyObject_CallMethod(descr, "__set__", "OO", obj, value)) { + Py_DECREF(result); + return 0; + } else { + return -1; + } +#endif + } else { + // Replace existing attribute. + return PyType_Type.tp_setattro(obj, name, value); + } +} + +/** + * Python 3's PyInstanceMethod_Type hides itself via its tp_descr_get, which prevents aliasing + * methods via cls.attr("m2") = cls.attr("m1"): instead the tp_descr_get returns a plain function, + * when called on a class, or a PyMethod, when called on an instance. Override that behaviour here + * to do a special case bypass for PyInstanceMethod_Types. + */ +extern "C" inline PyObject *pybind11_meta_getattro(PyObject *obj, PyObject *name) { + PyObject *descr = _PyType_Lookup((PyTypeObject *) obj, name); + if (descr && PyInstanceMethod_Check(descr)) { + Py_INCREF(descr); + return descr; + } + return PyType_Type.tp_getattro(obj, name); +} + +/// metaclass `__call__` function that is used to create all pybind11 objects. +extern "C" inline PyObject *pybind11_meta_call(PyObject *type, PyObject *args, PyObject *kwargs) { + + // use the default metaclass call to create/initialize the object + PyObject *self = PyType_Type.tp_call(type, args, kwargs); + if (self == nullptr) { + return nullptr; + } + + // Ensure that the base __init__ function(s) were called + values_and_holders vhs(self); + for (const auto &vh : vhs) { + if (!vh.holder_constructed() && !vhs.is_redundant_value_and_holder(vh)) { + PyErr_Format(PyExc_TypeError, + "%.200s.__init__() must be called when overriding __init__", + get_fully_qualified_tp_name(vh.type->type).c_str()); + Py_DECREF(self); + return nullptr; + } + } + + return self; +} + +/// Cleanup the type-info for a pybind11-registered type. 
+extern "C" inline void pybind11_meta_dealloc(PyObject *obj) { + with_internals([obj](internals &internals) { + auto *type = (PyTypeObject *) obj; + + // A pybind11-registered type will: + // 1) be found in internals.registered_types_py + // 2) have exactly one associated `detail::type_info` + auto found_type = internals.registered_types_py.find(type); + if (found_type != internals.registered_types_py.end() && found_type->second.size() == 1 + && found_type->second[0]->type == type) { + + auto *tinfo = found_type->second[0]; + auto tindex = std::type_index(*tinfo->cpptype); + internals.direct_conversions.erase(tindex); + + if (tinfo->module_local) { + get_local_internals().registered_types_cpp.erase(tindex); + } else { + internals.registered_types_cpp.erase(tindex); + } + internals.registered_types_py.erase(tinfo->type); + + // Actually just `std::erase_if`, but that's only available in C++20 + auto &cache = internals.inactive_override_cache; + for (auto it = cache.begin(), last = cache.end(); it != last;) { + if (it->first == (PyObject *) tinfo->type) { + it = cache.erase(it); + } else { + ++it; + } + } + + delete tinfo; + } + }); + + PyType_Type.tp_dealloc(obj); +} + +/** This metaclass is assigned by default to all pybind11 types and is required in order + for static properties to function correctly. Users may override this using `py::metaclass`. + Return value: New reference. 
*/ +inline PyTypeObject *make_default_metaclass() { + constexpr auto *name = "pybind11_type"; + auto name_obj = reinterpret_steal(PYBIND11_FROM_STRING(name)); + + /* Danger zone: from now (and until PyType_Ready), make sure to + issue no Python C API calls which could potentially invoke the + garbage collector (the GC will call type_traverse(), which will in + turn find the newly constructed type in an invalid state) */ + auto *heap_type = (PyHeapTypeObject *) PyType_Type.tp_alloc(&PyType_Type, 0); + if (!heap_type) { + pybind11_fail("make_default_metaclass(): error allocating metaclass!"); + } + + heap_type->ht_name = name_obj.inc_ref().ptr(); +#ifdef PYBIND11_BUILTIN_QUALNAME + heap_type->ht_qualname = name_obj.inc_ref().ptr(); +#endif + + auto *type = &heap_type->ht_type; + type->tp_name = name; + type->tp_base = type_incref(&PyType_Type); + type->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HEAPTYPE; + + type->tp_call = pybind11_meta_call; + + type->tp_setattro = pybind11_meta_setattro; + type->tp_getattro = pybind11_meta_getattro; + + type->tp_dealloc = pybind11_meta_dealloc; + + if (PyType_Ready(type) < 0) { + pybind11_fail("make_default_metaclass(): failure in PyType_Ready()!"); + } + + setattr((PyObject *) type, "__module__", str("pybind11_builtins")); + PYBIND11_SET_OLDPY_QUALNAME(type, name_obj); + + return type; +} + +/// For multiple inheritance types we need to recursively register/deregister base pointers for any +/// base classes with pointers that are difference from the instance value pointer so that we can +/// correctly recognize an offset base class pointer. This calls a function with any offset base +/// ptrs. 
+inline void traverse_offset_bases(void *valueptr, + const detail::type_info *tinfo, + instance *self, + bool (*f)(void * /*parentptr*/, instance * /*self*/)) { + for (handle h : reinterpret_borrow(tinfo->type->tp_bases)) { + if (auto *parent_tinfo = get_type_info((PyTypeObject *) h.ptr())) { + for (auto &c : parent_tinfo->implicit_casts) { + if (c.first == tinfo->cpptype) { + auto *parentptr = c.second(valueptr); + if (parentptr != valueptr) { + f(parentptr, self); + } + traverse_offset_bases(parentptr, parent_tinfo, self, f); + break; + } + } + } + } +} + +inline bool register_instance_impl(void *ptr, instance *self) { + with_instance_map(ptr, [&](instance_map &instances) { instances.emplace(ptr, self); }); + return true; // unused, but gives the same signature as the deregister func +} +inline bool deregister_instance_impl(void *ptr, instance *self) { + return with_instance_map(ptr, [&](instance_map &instances) { + auto range = instances.equal_range(ptr); + for (auto it = range.first; it != range.second; ++it) { + if (self == it->second) { + instances.erase(it); + return true; + } + } + return false; + }); +} + +inline void register_instance(instance *self, void *valptr, const type_info *tinfo) { + register_instance_impl(valptr, self); + if (!tinfo->simple_ancestors) { + traverse_offset_bases(valptr, tinfo, self, register_instance_impl); + } +} + +inline bool deregister_instance(instance *self, void *valptr, const type_info *tinfo) { + bool ret = deregister_instance_impl(valptr, self); + if (!tinfo->simple_ancestors) { + traverse_offset_bases(valptr, tinfo, self, deregister_instance_impl); + } + return ret; +} + +/// Instance creation function for all pybind11 types. It allocates the internal instance layout +/// for holding C++ objects and holders. Allocation is done lazily (the first time the instance is +/// cast to a reference or pointer), and initialization is done by an `__init__` function. 
+inline PyObject *make_new_instance(PyTypeObject *type) { +#if defined(PYPY_VERSION) + // PyPy gets tp_basicsize wrong (issue 2482) under multiple inheritance when the first + // inherited object is a plain Python type (i.e. not derived from an extension type). Fix it. + ssize_t instance_size = static_cast(sizeof(instance)); + if (type->tp_basicsize < instance_size) { + type->tp_basicsize = instance_size; + } +#endif + PyObject *self = type->tp_alloc(type, 0); + auto *inst = reinterpret_cast(self); + // Allocate the value/holder internals: + inst->allocate_layout(); + + return self; +} + +/// Instance creation function for all pybind11 types. It only allocates space for the +/// C++ object, but doesn't call the constructor -- an `__init__` function must do that. +extern "C" inline PyObject *pybind11_object_new(PyTypeObject *type, PyObject *, PyObject *) { + return make_new_instance(type); +} + +/// An `__init__` function constructs the C++ object. Users should provide at least one +/// of these using `py::init` or directly with `.def(__init__, ...)`. Otherwise, the +/// following default function will be used which simply throws an exception. 
+extern "C" inline int pybind11_object_init(PyObject *self, PyObject *, PyObject *) { + PyTypeObject *type = Py_TYPE(self); + std::string msg = get_fully_qualified_tp_name(type) + ": No constructor defined!"; + set_error(PyExc_TypeError, msg.c_str()); + return -1; +} + +inline void add_patient(PyObject *nurse, PyObject *patient) { + auto *instance = reinterpret_cast(nurse); + instance->has_patients = true; + Py_INCREF(patient); + + with_internals([&](internals &internals) { internals.patients[nurse].push_back(patient); }); +} + +inline void clear_patients(PyObject *self) { + auto *instance = reinterpret_cast(self); + std::vector patients; + + with_internals([&](internals &internals) { + auto pos = internals.patients.find(self); + + if (pos == internals.patients.end()) { + pybind11_fail( + "FATAL: Internal consistency check failed: Invalid clear_patients() call."); + } + + // Clearing the patients can cause more Python code to run, which + // can invalidate the iterator. Extract the vector of patients + // from the unordered_map first. + patients = std::move(pos->second); + internals.patients.erase(pos); + }); + + instance->has_patients = false; + for (PyObject *&patient : patients) { + Py_CLEAR(patient); + } +} + +/// Clears all internal data from the instance and removes it from registered instances in +/// preparation for deallocation. +inline void clear_instance(PyObject *self) { + auto *instance = reinterpret_cast(self); + + // Deallocate any values/holders, if present: + for (auto &v_h : values_and_holders(instance)) { + if (v_h) { + + // We have to deregister before we call dealloc because, for virtual MI types, we still + // need to be able to get the parent pointers. 
+ if (v_h.instance_registered() + && !deregister_instance(instance, v_h.value_ptr(), v_h.type)) { + pybind11_fail( + "pybind11_object_dealloc(): Tried to deallocate unregistered instance!"); + } + + if (instance->owned || v_h.holder_constructed()) { + v_h.type->dealloc(v_h); + } + } + } + // Deallocate the value/holder layout internals: + instance->deallocate_layout(); + + if (instance->weakrefs) { + PyObject_ClearWeakRefs(self); + } + + PyObject **dict_ptr = _PyObject_GetDictPtr(self); + if (dict_ptr) { + Py_CLEAR(*dict_ptr); + } + + if (instance->has_patients) { + clear_patients(self); + } +} + +/// Instance destructor function for all pybind11 types. It calls `type_info.dealloc` +/// to destroy the C++ object itself, while the rest is Python bookkeeping. +extern "C" inline void pybind11_object_dealloc(PyObject *self) { + auto *type = Py_TYPE(self); + + // If this is a GC tracked object, untrack it first + // Note that the track call is implicitly done by the + // default tp_alloc, which we never override. + if (PyType_HasFeature(type, Py_TPFLAGS_HAVE_GC) != 0) { + PyObject_GC_UnTrack(self); + } + + clear_instance(self); + + type->tp_free(self); + +#if PY_VERSION_HEX < 0x03080000 + // `type->tp_dealloc != pybind11_object_dealloc` means that we're being called + // as part of a derived type's dealloc, in which case we're not allowed to decref + // the type here. For cross-module compatibility, we shouldn't compare directly + // with `pybind11_object_dealloc`, but with the common one stashed in internals. + auto pybind11_object_type = (PyTypeObject *) get_internals().instance_base; + if (type->tp_dealloc == pybind11_object_type->tp_dealloc) + Py_DECREF(type); +#else + // This was not needed before Python 3.8 (Python issue 35810) + // https://github.com/pybind/pybind11/issues/1946 + Py_DECREF(type); +#endif +} + +std::string error_string(); + +/** Create the type which can be used as a common base for all classes. 
This is + needed in order to satisfy Python's requirements for multiple inheritance. + Return value: New reference. */ +inline PyObject *make_object_base_type(PyTypeObject *metaclass) { + constexpr auto *name = "pybind11_object"; + auto name_obj = reinterpret_steal(PYBIND11_FROM_STRING(name)); + + /* Danger zone: from now (and until PyType_Ready), make sure to + issue no Python C API calls which could potentially invoke the + garbage collector (the GC will call type_traverse(), which will in + turn find the newly constructed type in an invalid state) */ + auto *heap_type = (PyHeapTypeObject *) metaclass->tp_alloc(metaclass, 0); + if (!heap_type) { + pybind11_fail("make_object_base_type(): error allocating type!"); + } + + heap_type->ht_name = name_obj.inc_ref().ptr(); +#ifdef PYBIND11_BUILTIN_QUALNAME + heap_type->ht_qualname = name_obj.inc_ref().ptr(); +#endif + + auto *type = &heap_type->ht_type; + type->tp_name = name; + type->tp_base = type_incref(&PyBaseObject_Type); + type->tp_basicsize = static_cast(sizeof(instance)); + type->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HEAPTYPE; + + type->tp_new = pybind11_object_new; + type->tp_init = pybind11_object_init; + type->tp_dealloc = pybind11_object_dealloc; + + /* Support weak references (needed for the keep_alive feature) */ + type->tp_weaklistoffset = offsetof(instance, weakrefs); + + if (PyType_Ready(type) < 0) { + pybind11_fail("PyType_Ready failed in make_object_base_type(): " + error_string()); + } + + setattr((PyObject *) type, "__module__", str("pybind11_builtins")); + PYBIND11_SET_OLDPY_QUALNAME(type, name_obj); + + assert(!PyType_HasFeature(type, Py_TPFLAGS_HAVE_GC)); + return (PyObject *) heap_type; +} + +/// dynamic_attr: Allow the garbage collector to traverse the internal instance `__dict__`. 
+extern "C" inline int pybind11_traverse(PyObject *self, visitproc visit, void *arg) { +#if PY_VERSION_HEX >= 0x030D0000 + PyObject_VisitManagedDict(self, visit, arg); +#else + PyObject *&dict = *_PyObject_GetDictPtr(self); + Py_VISIT(dict); +#endif +// https://docs.python.org/3/c-api/typeobj.html#c.PyTypeObject.tp_traverse +#if PY_VERSION_HEX >= 0x03090000 + Py_VISIT(Py_TYPE(self)); +#endif + return 0; +} + +/// dynamic_attr: Allow the GC to clear the dictionary. +extern "C" inline int pybind11_clear(PyObject *self) { +#if PY_VERSION_HEX >= 0x030D0000 + PyObject_ClearManagedDict(self); +#else + PyObject *&dict = *_PyObject_GetDictPtr(self); + Py_CLEAR(dict); +#endif + return 0; +} + +/// Give instances of this type a `__dict__` and opt into garbage collection. +inline void enable_dynamic_attributes(PyHeapTypeObject *heap_type) { + auto *type = &heap_type->ht_type; + type->tp_flags |= Py_TPFLAGS_HAVE_GC; +#if PY_VERSION_HEX < 0x030B0000 + type->tp_dictoffset = type->tp_basicsize; // place dict at the end + type->tp_basicsize += (ssize_t) sizeof(PyObject *); // and allocate enough space for it +#else + type->tp_flags |= Py_TPFLAGS_MANAGED_DICT; +#endif + type->tp_traverse = pybind11_traverse; + type->tp_clear = pybind11_clear; + + static PyGetSetDef getset[] + = {{"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict, nullptr, nullptr}, + {nullptr, nullptr, nullptr, nullptr, nullptr}}; + type->tp_getset = getset; +} + +/// buffer_protocol: Fill in the view as specified by flags. +extern "C" inline int pybind11_getbuffer(PyObject *obj, Py_buffer *view, int flags) { + // Look for a `get_buffer` implementation in this type's info or any bases (following MRO). 
+ type_info *tinfo = nullptr; + for (auto type : reinterpret_borrow(Py_TYPE(obj)->tp_mro)) { + tinfo = get_type_info((PyTypeObject *) type.ptr()); + if (tinfo && tinfo->get_buffer) { + break; + } + } + if (view == nullptr || !tinfo || !tinfo->get_buffer) { + if (view) { + view->obj = nullptr; + } + set_error(PyExc_BufferError, "pybind11_getbuffer(): Internal error"); + return -1; + } + std::memset(view, 0, sizeof(Py_buffer)); + buffer_info *info = nullptr; + try { + info = tinfo->get_buffer(obj, tinfo->get_buffer_data); + } catch (...) { + try_translate_exceptions(); + raise_from(PyExc_BufferError, "Error getting buffer"); + return -1; + } + if (info == nullptr) { + pybind11_fail("FATAL UNEXPECTED SITUATION: tinfo->get_buffer() returned nullptr."); + } + + if ((flags & PyBUF_WRITABLE) == PyBUF_WRITABLE && info->readonly) { + delete info; + // view->obj = nullptr; // Was just memset to 0, so not necessary + set_error(PyExc_BufferError, "Writable buffer requested for readonly storage"); + return -1; + } + view->obj = obj; + view->ndim = 1; + view->internal = info; + view->buf = info->ptr; + view->itemsize = info->itemsize; + view->len = view->itemsize; + for (auto s : info->shape) { + view->len *= s; + } + view->readonly = static_cast(info->readonly); + if ((flags & PyBUF_FORMAT) == PyBUF_FORMAT) { + view->format = const_cast(info->format.c_str()); + } + if ((flags & PyBUF_STRIDES) == PyBUF_STRIDES) { + view->ndim = (int) info->ndim; + view->strides = info->strides.data(); + view->shape = info->shape.data(); + } + Py_INCREF(view->obj); + return 0; +} + +/// buffer_protocol: Release the resources of the buffer. +extern "C" inline void pybind11_releasebuffer(PyObject *, Py_buffer *view) { + delete (buffer_info *) view->internal; +} + +/// Give this type a buffer interface. 
+inline void enable_buffer_protocol(PyHeapTypeObject *heap_type) { + heap_type->ht_type.tp_as_buffer = &heap_type->as_buffer; + + heap_type->as_buffer.bf_getbuffer = pybind11_getbuffer; + heap_type->as_buffer.bf_releasebuffer = pybind11_releasebuffer; +} + +/** Create a brand new Python type according to the `type_record` specification. + Return value: New reference. */ +inline PyObject *make_new_python_type(const type_record &rec) { + auto name = reinterpret_steal(PYBIND11_FROM_STRING(rec.name)); + + auto qualname = name; + if (rec.scope && !PyModule_Check(rec.scope.ptr()) && hasattr(rec.scope, "__qualname__")) { + qualname = reinterpret_steal( + PyUnicode_FromFormat("%U.%U", rec.scope.attr("__qualname__").ptr(), name.ptr())); + } + + object module_; + if (rec.scope) { + if (hasattr(rec.scope, "__module__")) { + module_ = rec.scope.attr("__module__"); + } else if (hasattr(rec.scope, "__name__")) { + module_ = rec.scope.attr("__name__"); + } + } + + const auto *full_name = c_str( +#if !defined(PYPY_VERSION) + module_ ? str(module_).cast() + "." + rec.name : +#endif + rec.name); + + char *tp_doc = nullptr; + if (rec.doc && options::show_user_defined_docstrings()) { + /* Allocate memory for docstring (Python will free this later on) */ + size_t size = std::strlen(rec.doc) + 1; +#if PY_VERSION_HEX >= 0x030D0000 + tp_doc = (char *) PyMem_MALLOC(size); +#else + tp_doc = (char *) PyObject_MALLOC(size); +#endif + std::memcpy((void *) tp_doc, rec.doc, size); + } + + auto &internals = get_internals(); + auto bases = tuple(rec.bases); + auto *base = (bases.empty()) ? internals.instance_base : bases[0].ptr(); + + /* Danger zone: from now (and until PyType_Ready), make sure to + issue no Python C API calls which could potentially invoke the + garbage collector (the GC will call type_traverse(), which will in + turn find the newly constructed type in an invalid state) */ + auto *metaclass + = rec.metaclass.ptr() ? 
(PyTypeObject *) rec.metaclass.ptr() : internals.default_metaclass; + + auto *heap_type = (PyHeapTypeObject *) metaclass->tp_alloc(metaclass, 0); + if (!heap_type) { + pybind11_fail(std::string(rec.name) + ": Unable to create type object!"); + } + + heap_type->ht_name = name.release().ptr(); +#ifdef PYBIND11_BUILTIN_QUALNAME + heap_type->ht_qualname = qualname.inc_ref().ptr(); +#endif + + auto *type = &heap_type->ht_type; + type->tp_name = full_name; + type->tp_doc = tp_doc; + type->tp_base = type_incref((PyTypeObject *) base); + type->tp_basicsize = static_cast(sizeof(instance)); + if (!bases.empty()) { + type->tp_bases = bases.release().ptr(); + } + + /* Don't inherit base __init__ */ + type->tp_init = pybind11_object_init; + + /* Supported protocols */ + type->tp_as_number = &heap_type->as_number; + type->tp_as_sequence = &heap_type->as_sequence; + type->tp_as_mapping = &heap_type->as_mapping; + type->tp_as_async = &heap_type->as_async; + + /* Flags */ + type->tp_flags |= Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HEAPTYPE; + if (!rec.is_final) { + type->tp_flags |= Py_TPFLAGS_BASETYPE; + } + + if (rec.dynamic_attr) { + enable_dynamic_attributes(heap_type); + } + + if (rec.buffer_protocol) { + enable_buffer_protocol(heap_type); + } + + if (rec.custom_type_setup_callback) { + rec.custom_type_setup_callback(heap_type); + } + + if (PyType_Ready(type) < 0) { + pybind11_fail(std::string(rec.name) + ": PyType_Ready failed: " + error_string()); + } + + assert(!rec.dynamic_attr || PyType_HasFeature(type, Py_TPFLAGS_HAVE_GC)); + + /* Register type with the parent scope */ + if (rec.scope) { + setattr(rec.scope, rec.name, (PyObject *) type); + } else { + Py_INCREF(type); // Keep it alive forever (reference leak) + } + + if (module_) { // Needed by pydoc + setattr((PyObject *) type, "__module__", module_); + } + + PYBIND11_SET_OLDPY_QUALNAME(type, qualname); + + return (PyObject *) type; +} + +PYBIND11_NAMESPACE_END(detail) +PYBIND11_NAMESPACE_END(PYBIND11_NAMESPACE) diff --git 
a/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/common.h b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/common.h new file mode 100644 index 0000000000000000000000000000000000000000..c51d1d60bc739bfd67efc64fb517f0f0c2d7592a --- /dev/null +++ b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/common.h @@ -0,0 +1,1287 @@ +/* + pybind11/detail/common.h -- Basic macros + + Copyright (c) 2016 Wenzel Jakob + + All rights reserved. Use of this source code is governed by a + BSD-style license that can be found in the LICENSE file. +*/ + +#pragma once + +#define PYBIND11_VERSION_MAJOR 2 +#define PYBIND11_VERSION_MINOR 13 +#define PYBIND11_VERSION_PATCH 6 + +// Similar to Python's convention: https://docs.python.org/3/c-api/apiabiversion.html +// Additional convention: 0xD = dev +#define PYBIND11_VERSION_HEX 0x020D0600 + +// Define some generic pybind11 helper macros for warning management. +// +// Note that compiler-specific push/pop pairs are baked into the +// PYBIND11_NAMESPACE_BEGIN/PYBIND11_NAMESPACE_END pair of macros. Therefore manual +// PYBIND11_WARNING_PUSH/PYBIND11_WARNING_POP are usually only needed in `#include` sections. +// +// If you find you need to suppress a warning, please try to make the suppression as local as +// possible using these macros. Please also be sure to push/pop with the pybind11 macros. Please +// only use compiler specifics if you need to check specific versions, e.g. Apple Clang vs. vanilla +// Clang. +#if defined(_MSC_VER) +# define PYBIND11_COMPILER_MSVC +# define PYBIND11_PRAGMA(...) __pragma(__VA_ARGS__) +# define PYBIND11_WARNING_PUSH PYBIND11_PRAGMA(warning(push)) +# define PYBIND11_WARNING_POP PYBIND11_PRAGMA(warning(pop)) +#elif defined(__INTEL_COMPILER) +# define PYBIND11_COMPILER_INTEL +# define PYBIND11_PRAGMA(...) 
_Pragma(#__VA_ARGS__) +# define PYBIND11_WARNING_PUSH PYBIND11_PRAGMA(warning push) +# define PYBIND11_WARNING_POP PYBIND11_PRAGMA(warning pop) +#elif defined(__clang__) +# define PYBIND11_COMPILER_CLANG +# define PYBIND11_PRAGMA(...) _Pragma(#__VA_ARGS__) +# define PYBIND11_WARNING_PUSH PYBIND11_PRAGMA(clang diagnostic push) +# define PYBIND11_WARNING_POP PYBIND11_PRAGMA(clang diagnostic push) +#elif defined(__GNUC__) +# define PYBIND11_COMPILER_GCC +# define PYBIND11_PRAGMA(...) _Pragma(#__VA_ARGS__) +# define PYBIND11_WARNING_PUSH PYBIND11_PRAGMA(GCC diagnostic push) +# define PYBIND11_WARNING_POP PYBIND11_PRAGMA(GCC diagnostic pop) +#endif + +#ifdef PYBIND11_COMPILER_MSVC +# define PYBIND11_WARNING_DISABLE_MSVC(name) PYBIND11_PRAGMA(warning(disable : name)) +#else +# define PYBIND11_WARNING_DISABLE_MSVC(name) +#endif + +#ifdef PYBIND11_COMPILER_CLANG +# define PYBIND11_WARNING_DISABLE_CLANG(name) PYBIND11_PRAGMA(clang diagnostic ignored name) +#else +# define PYBIND11_WARNING_DISABLE_CLANG(name) +#endif + +#ifdef PYBIND11_COMPILER_GCC +# define PYBIND11_WARNING_DISABLE_GCC(name) PYBIND11_PRAGMA(GCC diagnostic ignored name) +#else +# define PYBIND11_WARNING_DISABLE_GCC(name) +#endif + +#ifdef PYBIND11_COMPILER_INTEL +# define PYBIND11_WARNING_DISABLE_INTEL(name) PYBIND11_PRAGMA(warning disable name) +#else +# define PYBIND11_WARNING_DISABLE_INTEL(name) +#endif + +#define PYBIND11_NAMESPACE_BEGIN(name) \ + namespace name { \ + PYBIND11_WARNING_PUSH + +#define PYBIND11_NAMESPACE_END(name) \ + PYBIND11_WARNING_POP \ + } + +// Robust support for some features and loading modules compiled against different pybind versions +// requires forcing hidden visibility on pybind code, so we enforce this by setting the attribute +// on the main `pybind11` namespace. 
+#if !defined(PYBIND11_NAMESPACE) +# ifdef __GNUG__ +# define PYBIND11_NAMESPACE pybind11 __attribute__((visibility("hidden"))) +# else +# define PYBIND11_NAMESPACE pybind11 +# endif +#endif + +#if !(defined(_MSC_VER) && __cplusplus == 199711L) +# if __cplusplus >= 201402L +# define PYBIND11_CPP14 +# if __cplusplus >= 201703L +# define PYBIND11_CPP17 +# if __cplusplus >= 202002L +# define PYBIND11_CPP20 +// Please update tests/pybind11_tests.cpp `cpp_std()` when adding a macro here. +# endif +# endif +# endif +#elif defined(_MSC_VER) && __cplusplus == 199711L +// MSVC sets _MSVC_LANG rather than __cplusplus (supposedly until the standard is fully +// implemented). Unless you use the /Zc:__cplusplus flag on Visual Studio 2017 15.7 Preview 3 +// or newer. +# if _MSVC_LANG >= 201402L +# define PYBIND11_CPP14 +# if _MSVC_LANG > 201402L +# define PYBIND11_CPP17 +# if _MSVC_LANG >= 202002L +# define PYBIND11_CPP20 +# endif +# endif +# endif +#endif + +#if defined(PYBIND11_CPP20) +# define PYBIND11_CONSTINIT constinit +# define PYBIND11_DTOR_CONSTEXPR constexpr +#else +# define PYBIND11_CONSTINIT +# define PYBIND11_DTOR_CONSTEXPR +#endif + +// Compiler version assertions +#if defined(__INTEL_COMPILER) +# if __INTEL_COMPILER < 1800 +# error pybind11 requires Intel C++ compiler v18 or newer +# elif __INTEL_COMPILER < 1900 && defined(PYBIND11_CPP14) +# error pybind11 supports only C++11 with Intel C++ compiler v18. Use v19 or newer for C++14. 
+# endif +/* The following pragma cannot be pop'ed: + https://community.intel.com/t5/Intel-C-Compiler/Inline-and-no-inline-warning/td-p/1216764 */ +# pragma warning disable 2196 // warning #2196: routine is both "inline" and "noinline" +#elif defined(__clang__) && !defined(__apple_build_version__) +# if __clang_major__ < 3 || (__clang_major__ == 3 && __clang_minor__ < 3) +# error pybind11 requires clang 3.3 or newer +# endif +#elif defined(__clang__) +// Apple changes clang version macros to its Xcode version; the first Xcode release based on +// (upstream) clang 3.3 was Xcode 5: +# if __clang_major__ < 5 +# error pybind11 requires Xcode/clang 5.0 or newer +# endif +#elif defined(__GNUG__) +# if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 8) +# error pybind11 requires gcc 4.8 or newer +# endif +#elif defined(_MSC_VER) +# if _MSC_VER < 1910 +# error pybind11 2.10+ requires MSVC 2017 or newer +# endif +#endif + +#if !defined(PYBIND11_EXPORT) +# if defined(WIN32) || defined(_WIN32) +# define PYBIND11_EXPORT __declspec(dllexport) +# else +# define PYBIND11_EXPORT __attribute__((visibility("default"))) +# endif +#endif + +#if !defined(PYBIND11_EXPORT_EXCEPTION) +# if defined(__apple_build_version__) +# define PYBIND11_EXPORT_EXCEPTION PYBIND11_EXPORT +# else +# define PYBIND11_EXPORT_EXCEPTION +# endif +#endif + +// For CUDA, GCC7, GCC8: +// PYBIND11_NOINLINE_FORCED is incompatible with `-Wattributes -Werror`. +// When defining PYBIND11_NOINLINE_FORCED, it is best to also use `-Wno-attributes`. +// However, the measured shared-library size saving when using noinline are only +// 1.7% for CUDA, -0.2% for GCC7, and 0.0% for GCC8 (using -DCMAKE_BUILD_TYPE=MinSizeRel, +// the default under pybind11/tests). +#if !defined(PYBIND11_NOINLINE_FORCED) \ + && (defined(__CUDACC__) || (defined(__GNUC__) && (__GNUC__ == 7 || __GNUC__ == 8))) +# define PYBIND11_NOINLINE_DISABLED +#endif + +// The PYBIND11_NOINLINE macro is for function DEFINITIONS. 
+// In contrast, FORWARD DECLARATIONS should never use this macro: +// https://stackoverflow.com/questions/9317473/forward-declaration-of-inline-functions +#if defined(PYBIND11_NOINLINE_DISABLED) // Option for maximum portability and experimentation. +# define PYBIND11_NOINLINE inline +#elif defined(_MSC_VER) +# define PYBIND11_NOINLINE __declspec(noinline) inline +#else +# define PYBIND11_NOINLINE __attribute__((noinline)) inline +#endif + +#if defined(__MINGW32__) +// For unknown reasons all PYBIND11_DEPRECATED member trigger a warning when declared +// whether it is used or not +# define PYBIND11_DEPRECATED(reason) +#elif defined(PYBIND11_CPP14) +# define PYBIND11_DEPRECATED(reason) [[deprecated(reason)]] +#else +# define PYBIND11_DEPRECATED(reason) __attribute__((deprecated(reason))) +#endif + +#if defined(PYBIND11_CPP17) +# define PYBIND11_MAYBE_UNUSED [[maybe_unused]] +#elif defined(_MSC_VER) && !defined(__clang__) +# define PYBIND11_MAYBE_UNUSED +#else +# define PYBIND11_MAYBE_UNUSED __attribute__((__unused__)) +#endif + +/* Don't let Python.h #define (v)snprintf as macro because they are implemented + properly in Visual Studio since 2015. */ +#if defined(_MSC_VER) +# define HAVE_SNPRINTF 1 +#endif + +/// Include Python header, disable linking to pythonX_d.lib on Windows in debug mode +#if defined(_MSC_VER) +PYBIND11_WARNING_PUSH +PYBIND11_WARNING_DISABLE_MSVC(4505) +// C4505: 'PySlice_GetIndicesEx': unreferenced local function has been removed (PyPy only) +# if defined(_DEBUG) && !defined(Py_DEBUG) +// Workaround for a VS 2022 issue. +// NOTE: This workaround knowingly violates the Python.h include order requirement: +// https://docs.python.org/3/c-api/intro.html#include-files +// See https://github.com/pybind/pybind11/pull/3497 for full context. 
+# include +# if _MSVC_STL_VERSION >= 143 +# include +# endif +# define PYBIND11_DEBUG_MARKER +# undef _DEBUG +# endif +#endif + +// https://en.cppreference.com/w/c/chrono/localtime +#if defined(__STDC_LIB_EXT1__) && !defined(__STDC_WANT_LIB_EXT1__) +# define __STDC_WANT_LIB_EXT1__ +#endif + +#ifdef __has_include +// std::optional (but including it in c++14 mode isn't allowed) +# if defined(PYBIND11_CPP17) && __has_include() +# define PYBIND11_HAS_OPTIONAL 1 +# endif +// std::experimental::optional (but not allowed in c++11 mode) +# if defined(PYBIND11_CPP14) && (__has_include() && \ + !__has_include()) +# define PYBIND11_HAS_EXP_OPTIONAL 1 +# endif +// std::variant +# if defined(PYBIND11_CPP17) && __has_include() +# define PYBIND11_HAS_VARIANT 1 +# endif +#elif defined(_MSC_VER) && defined(PYBIND11_CPP17) +# define PYBIND11_HAS_OPTIONAL 1 +# define PYBIND11_HAS_VARIANT 1 +#endif + +#if defined(PYBIND11_CPP17) +# if defined(__has_include) +# if __has_include() +# define PYBIND11_HAS_STRING_VIEW +# endif +# elif defined(_MSC_VER) +# define PYBIND11_HAS_STRING_VIEW +# endif +#endif + +#include +#if PY_VERSION_HEX < 0x03070000 +# error "PYTHON < 3.7 IS UNSUPPORTED. pybind11 v2.12 was the last to support Python 3.6." 
+#endif +#include +#include + +/* Python #defines overrides on all sorts of core functions, which + tends to weak havok in C++ codebases that expect these to work + like regular functions (potentially with several overloads) */ +#if defined(isalnum) +# undef isalnum +# undef isalpha +# undef islower +# undef isspace +# undef isupper +# undef tolower +# undef toupper +#endif + +#if defined(copysign) +# undef copysign +#endif + +#if defined(PYBIND11_NUMPY_1_ONLY) +# define PYBIND11_INTERNAL_NUMPY_1_ONLY_DETECTED +#endif + +#if defined(PYPY_VERSION) && !defined(PYBIND11_SIMPLE_GIL_MANAGEMENT) +# define PYBIND11_SIMPLE_GIL_MANAGEMENT +#endif + +#if defined(_MSC_VER) +# if defined(PYBIND11_DEBUG_MARKER) +# define _DEBUG +# undef PYBIND11_DEBUG_MARKER +# endif +PYBIND11_WARNING_POP +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#if defined(__has_include) +# if __has_include() +# include +# endif +#endif + +// Must be after including or one of the other headers specified by the standard +#if defined(__cpp_lib_char8_t) && __cpp_lib_char8_t >= 201811L +# define PYBIND11_HAS_U8STRING +#endif + +// See description of PR #4246: +#if !defined(PYBIND11_NO_ASSERT_GIL_HELD_INCREF_DECREF) && !defined(NDEBUG) \ + && !defined(PYPY_VERSION) && !defined(PYBIND11_ASSERT_GIL_HELD_INCREF_DECREF) +# define PYBIND11_ASSERT_GIL_HELD_INCREF_DECREF +#endif + +// #define PYBIND11_STR_LEGACY_PERMISSIVE +// If DEFINED, pybind11::str can hold PyUnicodeObject or PyBytesObject +// (probably surprising and never documented, but this was the +// legacy behavior until and including v2.6.x). As a side-effect, +// pybind11::isinstance() is true for both pybind11::str and +// pybind11::bytes. +// If UNDEFINED, pybind11::str can only hold PyUnicodeObject, and +// pybind11::isinstance() is true only for pybind11::str. 
+// However, for Python 2 only (!), the pybind11::str caster +// implicitly decoded bytes to PyUnicodeObject. This was to ease +// the transition from the legacy behavior to the non-permissive +// behavior. + +/// Compatibility macros for Python 2 / Python 3 versions TODO: remove +#define PYBIND11_INSTANCE_METHOD_NEW(ptr, class_) PyInstanceMethod_New(ptr) +#define PYBIND11_INSTANCE_METHOD_CHECK PyInstanceMethod_Check +#define PYBIND11_INSTANCE_METHOD_GET_FUNCTION PyInstanceMethod_GET_FUNCTION +#define PYBIND11_BYTES_CHECK PyBytes_Check +#define PYBIND11_BYTES_FROM_STRING PyBytes_FromString +#define PYBIND11_BYTES_FROM_STRING_AND_SIZE PyBytes_FromStringAndSize +#define PYBIND11_BYTES_AS_STRING_AND_SIZE PyBytes_AsStringAndSize +#define PYBIND11_BYTES_AS_STRING PyBytes_AsString +#define PYBIND11_BYTES_SIZE PyBytes_Size +#define PYBIND11_LONG_CHECK(o) PyLong_Check(o) +#define PYBIND11_LONG_AS_LONGLONG(o) PyLong_AsLongLong(o) +#define PYBIND11_LONG_FROM_SIGNED(o) PyLong_FromSsize_t((ssize_t) (o)) +#define PYBIND11_LONG_FROM_UNSIGNED(o) PyLong_FromSize_t((size_t) (o)) +#define PYBIND11_BYTES_NAME "bytes" +#define PYBIND11_STRING_NAME "str" +#define PYBIND11_SLICE_OBJECT PyObject +#define PYBIND11_FROM_STRING PyUnicode_FromString +#define PYBIND11_STR_TYPE ::pybind11::str +#define PYBIND11_BOOL_ATTR "__bool__" +#define PYBIND11_NB_BOOL(ptr) ((ptr)->nb_bool) +#define PYBIND11_BUILTINS_MODULE "builtins" +// Providing a separate declaration to make Clang's -Wmissing-prototypes happy. +// See comment for PYBIND11_MODULE below for why this is marked "maybe unused". 
+#define PYBIND11_PLUGIN_IMPL(name) \ + extern "C" PYBIND11_MAYBE_UNUSED PYBIND11_EXPORT PyObject *PyInit_##name(); \ + extern "C" PYBIND11_EXPORT PyObject *PyInit_##name() + +#define PYBIND11_TRY_NEXT_OVERLOAD ((PyObject *) 1) // special failure return code +#define PYBIND11_STRINGIFY(x) #x +#define PYBIND11_TOSTRING(x) PYBIND11_STRINGIFY(x) +#define PYBIND11_CONCAT(first, second) first##second +#define PYBIND11_ENSURE_INTERNALS_READY pybind11::detail::get_internals(); + +#define PYBIND11_CHECK_PYTHON_VERSION \ + { \ + const char *compiled_ver \ + = PYBIND11_TOSTRING(PY_MAJOR_VERSION) "." PYBIND11_TOSTRING(PY_MINOR_VERSION); \ + const char *runtime_ver = Py_GetVersion(); \ + size_t len = std::strlen(compiled_ver); \ + if (std::strncmp(runtime_ver, compiled_ver, len) != 0 \ + || (runtime_ver[len] >= '0' && runtime_ver[len] <= '9')) { \ + PyErr_Format(PyExc_ImportError, \ + "Python version mismatch: module was compiled for Python %s, " \ + "but the interpreter version is incompatible: %s.", \ + compiled_ver, \ + runtime_ver); \ + return nullptr; \ + } \ + } + +#define PYBIND11_CATCH_INIT_EXCEPTIONS \ + catch (pybind11::error_already_set & e) { \ + pybind11::raise_from(e, PyExc_ImportError, "initialization failed"); \ + return nullptr; \ + } \ + catch (const std::exception &e) { \ + ::pybind11::set_error(PyExc_ImportError, e.what()); \ + return nullptr; \ + } + +/** \rst + ***Deprecated in favor of PYBIND11_MODULE*** + + This macro creates the entry point that will be invoked when the Python interpreter + imports a plugin library. Please create a `module_` in the function body and return + the pointer to its underlying Python object at the end. + + .. 
code-block:: cpp + + PYBIND11_PLUGIN(example) { + pybind11::module_ m("example", "pybind11 example plugin"); + /// Set up bindings here + return m.ptr(); + } +\endrst */ +#define PYBIND11_PLUGIN(name) \ + PYBIND11_DEPRECATED("PYBIND11_PLUGIN is deprecated, use PYBIND11_MODULE") \ + static PyObject *pybind11_init(); \ + PYBIND11_PLUGIN_IMPL(name) { \ + PYBIND11_CHECK_PYTHON_VERSION \ + PYBIND11_ENSURE_INTERNALS_READY \ + try { \ + return pybind11_init(); \ + } \ + PYBIND11_CATCH_INIT_EXCEPTIONS \ + } \ + PyObject *pybind11_init() + +/** \rst + This macro creates the entry point that will be invoked when the Python interpreter + imports an extension module. The module name is given as the first argument and it + should not be in quotes. The second macro argument defines a variable of type + `py::module_` which can be used to initialize the module. + + The entry point is marked as "maybe unused" to aid dead-code detection analysis: + since the entry point is typically only looked up at runtime and not referenced + during translation, it would otherwise appear as unused ("dead") code. + + .. code-block:: cpp + + PYBIND11_MODULE(example, m) { + m.doc() = "pybind11 example module"; + + // Add bindings here + m.def("foo", []() { + return "Hello, World!"; + }); + } + + The third macro argument is optional (available since 2.13.0), and can be used to + mark the extension module as safe to run without the GIL under a free-threaded CPython + interpreter. Passing this argument has no effect on other interpreters. + + .. code-block:: cpp + + PYBIND11_MODULE(example, m, py::mod_gil_not_used()) { + m.doc() = "pybind11 example module safe to run without the GIL"; + + // Add bindings here + m.def("foo", []() { + return "Hello, Free-threaded World!"; + }); + } + +\endrst */ +PYBIND11_WARNING_PUSH +PYBIND11_WARNING_DISABLE_CLANG("-Wgnu-zero-variadic-macro-arguments") +#define PYBIND11_MODULE(name, variable, ...) 
\ + static ::pybind11::module_::module_def PYBIND11_CONCAT(pybind11_module_def_, name) \ + PYBIND11_MAYBE_UNUSED; \ + PYBIND11_MAYBE_UNUSED \ + static void PYBIND11_CONCAT(pybind11_init_, name)(::pybind11::module_ &); \ + PYBIND11_PLUGIN_IMPL(name) { \ + PYBIND11_CHECK_PYTHON_VERSION \ + PYBIND11_ENSURE_INTERNALS_READY \ + auto m = ::pybind11::module_::create_extension_module( \ + PYBIND11_TOSTRING(name), \ + nullptr, \ + &PYBIND11_CONCAT(pybind11_module_def_, name), \ + ##__VA_ARGS__); \ + try { \ + PYBIND11_CONCAT(pybind11_init_, name)(m); \ + return m.ptr(); \ + } \ + PYBIND11_CATCH_INIT_EXCEPTIONS \ + } \ + void PYBIND11_CONCAT(pybind11_init_, name)(::pybind11::module_ & (variable)) +PYBIND11_WARNING_POP + +PYBIND11_NAMESPACE_BEGIN(PYBIND11_NAMESPACE) + +using ssize_t = Py_ssize_t; +using size_t = std::size_t; + +template +inline ssize_t ssize_t_cast(const IntType &val) { + static_assert(sizeof(IntType) <= sizeof(ssize_t), "Implicit narrowing is not permitted."); + return static_cast(val); +} + +/// Approach used to cast a previously unknown C++ instance into a Python object +enum class return_value_policy : uint8_t { + /** This is the default return value policy, which falls back to the policy + return_value_policy::take_ownership when the return value is a pointer. + Otherwise, it uses return_value::move or return_value::copy for rvalue + and lvalue references, respectively. See below for a description of what + all of these different policies do. */ + automatic = 0, + + /** As above, but use policy return_value_policy::reference when the return + value is a pointer. This is the default conversion policy for function + arguments when calling Python functions manually from C++ code (i.e. via + handle::operator()). You probably won't need to use this. */ + automatic_reference, + + /** Reference an existing object (i.e. do not create a new copy) and take + ownership. 
Python will call the destructor and delete operator when the + object's reference count reaches zero. Undefined behavior ensues when + the C++ side does the same.. */ + take_ownership, + + /** Create a new copy of the returned object, which will be owned by + Python. This policy is comparably safe because the lifetimes of the two + instances are decoupled. */ + copy, + + /** Use std::move to move the return value contents into a new instance + that will be owned by Python. This policy is comparably safe because the + lifetimes of the two instances (move source and destination) are + decoupled. */ + move, + + /** Reference an existing object, but do not take ownership. The C++ side + is responsible for managing the object's lifetime and deallocating it + when it is no longer used. Warning: undefined behavior will ensue when + the C++ side deletes an object that is still referenced and used by + Python. */ + reference, + + /** This policy only applies to methods and properties. It references the + object without taking ownership similar to the above + return_value_policy::reference policy. In contrast to that policy, the + function or property's implicit this argument (called the parent) is + considered to be the owner of the return value (the child). + pybind11 then couples the lifetime of the parent to the child via a + reference relationship that ensures that the parent cannot be garbage + collected while Python is still using the child. More advanced + variations of this scheme are also possible using combinations of + return_value_policy::reference and the keep_alive call policy */ + reference_internal +}; + +PYBIND11_NAMESPACE_BEGIN(detail) + +inline static constexpr int log2(size_t n, int k = 0) { + return (n <= 1) ? k : log2(n >> 1, k + 1); +} + +// Returns the size as a multiple of sizeof(void *), rounded up. 
+inline static constexpr size_t size_in_ptrs(size_t s) { + return 1 + ((s - 1) >> log2(sizeof(void *))); +} + +/** + * The space to allocate for simple layout instance holders (see below) in multiple of the size of + * a pointer (e.g. 2 means 16 bytes on 64-bit architectures). The default is the minimum required + * to holder either a std::unique_ptr or std::shared_ptr (which is almost always + * sizeof(std::shared_ptr)). + */ +constexpr size_t instance_simple_holder_in_ptrs() { + static_assert(sizeof(std::shared_ptr) >= sizeof(std::unique_ptr), + "pybind assumes std::shared_ptrs are at least as big as std::unique_ptrs"); + return size_in_ptrs(sizeof(std::shared_ptr)); +} + +// Forward declarations +struct type_info; +struct value_and_holder; + +struct nonsimple_values_and_holders { + void **values_and_holders; + uint8_t *status; +}; + +/// The 'instance' type which needs to be standard layout (need to be able to use 'offsetof') +struct instance { + PyObject_HEAD + /// Storage for pointers and holder; see simple_layout, below, for a description + union { + void *simple_value_holder[1 + instance_simple_holder_in_ptrs()]; + nonsimple_values_and_holders nonsimple; + }; + /// Weak references + PyObject *weakrefs; + /// If true, the pointer is owned which means we're free to manage it with a holder. + bool owned : 1; + /** + * An instance has two possible value/holder layouts. + * + * Simple layout (when this flag is true), means the `simple_value_holder` is set with a + * pointer and the holder object governing that pointer, i.e. [val1*][holder]. This layout is + * applied whenever there is no python-side multiple inheritance of bound C++ types *and* the + * type's holder will fit in the default space (which is large enough to hold either a + * std::unique_ptr or std::shared_ptr). 
+ * + * Non-simple layout applies when using custom holders that require more space than + * `shared_ptr` (which is typically the size of two pointers), or when multiple inheritance is + * used on the python side. Non-simple layout allocates the required amount of memory to have + * multiple bound C++ classes as parents. Under this layout, `nonsimple.values_and_holders` is + * set to a pointer to allocated space of the required space to hold a sequence of value + * pointers and holders followed `status`, a set of bit flags (1 byte each), i.e. + * [val1*][holder1][val2*][holder2]...[bb...] where each [block] is rounded up to a multiple + * of `sizeof(void *)`. `nonsimple.status` is, for convenience, a pointer to the beginning of + * the [bb...] block (but not independently allocated). + * + * Status bits indicate whether the associated holder is constructed (& + * status_holder_constructed) and whether the value pointer is registered (& + * status_instance_registered) in `registered_instances`. + */ + bool simple_layout : 1; + /// For simple layout, tracks whether the holder has been constructed + bool simple_holder_constructed : 1; + /// For simple layout, tracks whether the instance is registered in `registered_instances` + bool simple_instance_registered : 1; + /// If true, get_internals().patients has an entry for this object + bool has_patients : 1; + + /// Initializes all of the above type/values/holders data (but not the instance values + /// themselves) + void allocate_layout(); + + /// Destroys/deallocates all of the above + void deallocate_layout(); + + /// Returns the value_and_holder wrapper for the given type (or the first, if `find_type` + /// omitted). Returns a default-constructed (with `.inst = nullptr`) object on failure if + /// `throw_if_missing` is false. 
+ value_and_holder get_value_and_holder(const type_info *find_type = nullptr, + bool throw_if_missing = true); + + /// Bit values for the non-simple status flags + static constexpr uint8_t status_holder_constructed = 1; + static constexpr uint8_t status_instance_registered = 2; +}; + +static_assert(std::is_standard_layout::value, + "Internal error: `pybind11::detail::instance` is not standard layout!"); + +/// from __cpp_future__ import (convenient aliases from C++14/17) +#if defined(PYBIND11_CPP14) +using std::conditional_t; +using std::enable_if_t; +using std::remove_cv_t; +using std::remove_reference_t; +#else +template +using enable_if_t = typename std::enable_if::type; +template +using conditional_t = typename std::conditional::type; +template +using remove_cv_t = typename std::remove_cv::type; +template +using remove_reference_t = typename std::remove_reference::type; +#endif + +#if defined(PYBIND11_CPP20) +using std::remove_cvref; +using std::remove_cvref_t; +#else +template +struct remove_cvref { + using type = remove_cv_t>; +}; +template +using remove_cvref_t = typename remove_cvref::type; +#endif + +/// Example usage: is_same_ignoring_cvref::value +template +using is_same_ignoring_cvref = std::is_same, U>; + +/// Index sequences +#if defined(PYBIND11_CPP14) +using std::index_sequence; +using std::make_index_sequence; +#else +template +struct index_sequence {}; +template +struct make_index_sequence_impl : make_index_sequence_impl {}; +template +struct make_index_sequence_impl<0, S...> { + using type = index_sequence; +}; +template +using make_index_sequence = typename make_index_sequence_impl::type; +#endif + +/// Make an index sequence of the indices of true arguments +template +struct select_indices_impl { + using type = ISeq; +}; +template +struct select_indices_impl, I, B, Bs...> + : select_indices_impl, index_sequence>, + I + 1, + Bs...> {}; +template +using select_indices = typename select_indices_impl, 0, Bs...>::type; + +/// Backports of 
std::bool_constant and std::negation to accommodate older compilers +template +using bool_constant = std::integral_constant; +template +struct negation : bool_constant {}; + +// PGI/Intel cannot detect operator delete with the "compatible" void_t impl, so +// using the new one (C++14 defect, so generally works on newer compilers, even +// if not in C++17 mode) +#if defined(__PGIC__) || defined(__INTEL_COMPILER) +template +using void_t = void; +#else +template +struct void_t_impl { + using type = void; +}; +template +using void_t = typename void_t_impl::type; +#endif + +/// Compile-time all/any/none of that check the boolean value of all template types +#if defined(__cpp_fold_expressions) && !(defined(_MSC_VER) && (_MSC_VER < 1916)) +template +using all_of = bool_constant<(Ts::value && ...)>; +template +using any_of = bool_constant<(Ts::value || ...)>; +#elif !defined(_MSC_VER) +template +struct bools {}; +template +using all_of = std::is_same, bools>; +template +using any_of = negation...>>; +#else +// MSVC has trouble with the above, but supports std::conjunction, which we can use instead (albeit +// at a slight loss of compilation efficiency). +template +using all_of = std::conjunction; +template +using any_of = std::disjunction; +#endif +template +using none_of = negation>; + +template class... Predicates> +using satisfies_all_of = all_of...>; +template class... Predicates> +using satisfies_any_of = any_of...>; +template class... 
Predicates> +using satisfies_none_of = none_of...>; + +/// Strip the class from a method type +template +struct remove_class {}; +template +struct remove_class { + using type = R(A...); +}; +template +struct remove_class { + using type = R(A...); +}; +#ifdef __cpp_noexcept_function_type +template +struct remove_class { + using type = R(A...); +}; +template +struct remove_class { + using type = R(A...); +}; +#endif +/// Helper template to strip away type modifiers +template +struct intrinsic_type { + using type = T; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +struct intrinsic_type { + using type = typename intrinsic_type::type; +}; +template +using intrinsic_t = typename intrinsic_type::type; + +/// Helper type to replace 'void' in some expressions +struct void_type {}; + +/// Helper template which holds a list of types +template +struct type_list {}; + +/// Compile-time integer sum +#ifdef __cpp_fold_expressions +template +constexpr size_t constexpr_sum(Ts... ns) { + return (0 + ... + size_t{ns}); +} +#else +constexpr size_t constexpr_sum() { return 0; } +template +constexpr size_t constexpr_sum(T n, Ts... ns) { + return size_t{n} + constexpr_sum(ns...); +} +#endif + +PYBIND11_NAMESPACE_BEGIN(constexpr_impl) +/// Implementation details for constexpr functions +constexpr int first(int i) { return i; } +template +constexpr int first(int i, T v, Ts... vs) { + return v ? i : first(i + 1, vs...); +} + +constexpr int last(int /*i*/, int result) { return result; } +template +constexpr int last(int i, int result, T v, Ts... vs) { + return last(i + 1, v ? 
i : result, vs...); +} +PYBIND11_NAMESPACE_END(constexpr_impl) + +/// Return the index of the first type in Ts which satisfies Predicate. +/// Returns sizeof...(Ts) if none match. +template