diff --git a/.venv/lib/python3.11/site-packages/google/api/backend.proto b/.venv/lib/python3.11/site-packages/google/api/backend.proto new file mode 100644 index 0000000000000000000000000000000000000000..499737a3e418c5cdb93a87dce502a48aff1591d7 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/api/backend.proto @@ -0,0 +1,185 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig"; +option java_multiple_files = true; +option java_outer_classname = "BackendProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// `Backend` defines the backend configuration for a service. +message Backend { + // A list of API backend rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated BackendRule rules = 1; +} + +// A backend rule provides configuration for an individual API element. +message BackendRule { + // Path Translation specifies how to combine the backend address with the + // request path in order to produce the appropriate forwarding URL for the + // request. + // + // Path Translation is applicable only to HTTP-based backends. Backends which + // do not accept requests over HTTP/HTTPS should leave `path_translation` + // unspecified. 
+ enum PathTranslation { + PATH_TRANSLATION_UNSPECIFIED = 0; + + // Use the backend address as-is, with no modification to the path. If the + // URL pattern contains variables, the variable names and values will be + // appended to the query string. If a query string parameter and a URL + // pattern variable have the same name, this may result in duplicate keys in + // the query string. + // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.cloudfunctions.net/getUser + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.cloudfunctions.net/getUser?cid=widgetworks&uid=johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.cloudfunctions.net/getUser?timezone=EST&cid=widgetworks&uid=johndoe + CONSTANT_ADDRESS = 1; + + // The request path will be appended to the backend address. + // + // # Examples + // + // Given the following operation config: + // + // Method path: /api/company/{cid}/user/{uid} + // Backend address: https://example.appspot.com + // + // Requests to the following request paths will call the backend at the + // translated path: + // + // Request path: /api/company/widgetworks/user/johndoe + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe + // + // Request path: /api/company/widgetworks/user/johndoe?timezone=EST + // Translated: + // https://example.appspot.com/api/company/widgetworks/user/johndoe?timezone=EST + APPEND_PATH_TO_ADDRESS = 2; + } + + // Selects the methods to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax + // details. + string selector = 1; + + // The address of the API backend. 
+ // + // The scheme is used to determine the backend protocol and security. + // The following schemes are accepted: + // + // SCHEME PROTOCOL SECURITY + // http:// HTTP None + // https:// HTTP TLS + // grpc:// gRPC None + // grpcs:// gRPC TLS + // + // It is recommended to explicitly include a scheme. Leaving out the scheme + // may cause constrasting behaviors across platforms. + // + // If the port is unspecified, the default is: + // - 80 for schemes without TLS + // - 443 for schemes with TLS + // + // For HTTP backends, use [protocol][google.api.BackendRule.protocol] + // to specify the protocol version. + string address = 2; + + // The number of seconds to wait for a response from a request. The default + // varies based on the request protocol and deployment environment. + double deadline = 3; + + // Deprecated, do not use. + double min_deadline = 4 [deprecated = true]; + + // The number of seconds to wait for the completion of a long running + // operation. The default is no deadline. + double operation_deadline = 5; + + PathTranslation path_translation = 6; + + // Authentication settings used by the backend. + // + // These are typically used to provide service management functionality to + // a backend served on a publicly-routable URL. The `authentication` + // details should match the authentication behavior used by the backend. + // + // For example, specifying `jwt_audience` implies that the backend expects + // authentication via a JWT. + // + // When authentication is unspecified, the resulting behavior is the same + // as `disable_auth` set to `true`. + // + // Refer to https://developers.google.com/identity/protocols/OpenIDConnect for + // JWT ID token. + oneof authentication { + // The JWT audience is used when generating a JWT ID token for the backend. + // This ID token will be added in the HTTP "authorization" header, and sent + // to the backend. 
+ string jwt_audience = 7; + + // When disable_auth is true, a JWT ID token won't be generated and the + // original "Authorization" HTTP header will be preserved. If the header is + // used to carry the original token and is expected by the backend, this + // field must be set to true to preserve the header. + bool disable_auth = 8; + } + + // The protocol used for sending a request to the backend. + // The supported values are "http/1.1" and "h2". + // + // The default value is inferred from the scheme in the + // [address][google.api.BackendRule.address] field: + // + // SCHEME PROTOCOL + // http:// http/1.1 + // https:// http/1.1 + // grpc:// h2 + // grpcs:// h2 + // + // For secure HTTP backends (https://) that support HTTP/2, set this field + // to "h2" for improved performance. + // + // Configuring this field to non-default values is only supported for secure + // HTTP backends. This field will be ignored for all other backends. + // + // See + // https://www.iana.org/assignments/tls-extensiontype-values/tls-extensiontype-values.xhtml#alpn-protocol-ids + // for more details on the supported values. + string protocol = 9; + + // The map between request protocol and the backend address. + map overrides_by_request_protocol = 10; +} diff --git a/.venv/lib/python3.11/site-packages/google/api/client.proto b/.venv/lib/python3.11/site-packages/google/api/client.proto new file mode 100644 index 0000000000000000000000000000000000000000..6003be5307afd565cc3e91b77821d1eb0346dda4 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/api/client.proto @@ -0,0 +1,462 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/launch_stage.proto"; +import "google/protobuf/descriptor.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "ClientProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // A definition of a client library method signature. + // + // In client libraries, each proto RPC corresponds to one or more methods + // which the end user is able to call, and calls the underlying RPC. + // Normally, this method receives a single argument (a struct or instance + // corresponding to the RPC request object). Defining this field will + // add one or more overloads providing flattened or simpler method signatures + // in some languages. + // + // The fields on the method signature are provided as a comma-separated + // string. 
+ // + // For example, the proto RPC and annotation: + // + // rpc CreateSubscription(CreateSubscriptionRequest) + // returns (Subscription) { + // option (google.api.method_signature) = "name,topic"; + // } + // + // Would add the following Java overload (in addition to the method accepting + // the request object): + // + // public final Subscription createSubscription(String name, String topic) + // + // The following backwards-compatibility guidelines apply: + // + // * Adding this annotation to an unannotated method is backwards + // compatible. + // * Adding this annotation to a method which already has existing + // method signature annotations is backwards compatible if and only if + // the new method signature annotation is last in the sequence. + // * Modifying or removing an existing method signature annotation is + // a breaking change. + // * Re-ordering existing method signature annotations is a breaking + // change. + repeated string method_signature = 1051; +} + +extend google.protobuf.ServiceOptions { + // The hostname for this service. + // This should be specified with no prefix or protocol. + // + // Example: + // + // service Foo { + // option (google.api.default_host) = "foo.googleapi.com"; + // ... + // } + string default_host = 1049; + + // OAuth scopes needed for the client. + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform"; + // ... + // } + // + // If there is more than one scope, use a comma-separated string: + // + // Example: + // + // service Foo { + // option (google.api.oauth_scopes) = \ + // "https://www.googleapis.com/auth/cloud-platform," + // "https://www.googleapis.com/auth/monitoring"; + // ... + // } + string oauth_scopes = 1050; + + // The API version of this service, which should be sent by version-aware + // clients to the service. 
This allows services to abide by the schema and + // behavior of the service at the time this API version was deployed. + // The format of the API version must be treated as opaque by clients. + // Services may use a format with an apparent structure, but clients must + // not rely on this to determine components within an API version, or attempt + // to construct other valid API versions. Note that this is for upcoming + // functionality and may not be implemented for all services. + // + // Example: + // + // service Foo { + // option (google.api.api_version) = "v1_20230821_preview"; + // } + string api_version = 525000001; +} + +// Required information for every language. +message CommonLanguageSettings { + // Link to automatically generated reference documentation. Example: + // https://cloud.google.com/nodejs/docs/reference/asset/latest + string reference_docs_uri = 1 [deprecated = true]; + + // The destination where API teams want this client library to be published. + repeated ClientLibraryDestination destinations = 2; + + // Configuration for which RPCs should be generated in the GAPIC client. + SelectiveGapicGeneration selective_gapic_generation = 3; +} + +// Details about how and where to publish client libraries. +message ClientLibrarySettings { + // Version of the API to apply these settings to. This is the full protobuf + // package for the API, ending in the version element. + // Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1". + string version = 1; + + // Launch stage of this version of the API. + LaunchStage launch_stage = 2; + + // When using transport=rest, the client request will encode enums as + // numbers rather than strings. + bool rest_numeric_enums = 3; + + // Settings for legacy Java features, supported in the Service YAML. + JavaSettings java_settings = 21; + + // Settings for C++ client libraries. + CppSettings cpp_settings = 22; + + // Settings for PHP client libraries. 
+ PhpSettings php_settings = 23; + + // Settings for Python client libraries. + PythonSettings python_settings = 24; + + // Settings for Node client libraries. + NodeSettings node_settings = 25; + + // Settings for .NET client libraries. + DotnetSettings dotnet_settings = 26; + + // Settings for Ruby client libraries. + RubySettings ruby_settings = 27; + + // Settings for Go client libraries. + GoSettings go_settings = 28; +} + +// This message configures the settings for publishing [Google Cloud Client +// libraries](https://cloud.google.com/apis/docs/cloud-client-libraries) +// generated from the service config. +message Publishing { + // A list of API method settings, e.g. the behavior for methods that use the + // long-running operation pattern. + repeated MethodSettings method_settings = 2; + + // Link to a *public* URI where users can report issues. Example: + // https://issuetracker.google.com/issues/new?component=190865&template=1161103 + string new_issue_uri = 101; + + // Link to product home page. Example: + // https://cloud.google.com/asset-inventory/docs/overview + string documentation_uri = 102; + + // Used as a tracking tag when collecting data about the APIs developer + // relations artifacts like docs, packages delivered to package managers, + // etc. Example: "speech". + string api_short_name = 103; + + // GitHub label to apply to issues and pull requests opened for this API. + string github_label = 104; + + // GitHub teams to be added to CODEOWNERS in the directory in GitHub + // containing source code for the client libraries for this API. + repeated string codeowner_github_teams = 105; + + // A prefix used in sample code when demarking regions to be included in + // documentation. + string doc_tag_prefix = 106; + + // For whom the client library is being published. + ClientLibraryOrganization organization = 107; + + // Client library settings. If the same version string appears multiple + // times in this list, then the last one wins. 
Settings from earlier + // settings with the same version string are discarded. + repeated ClientLibrarySettings library_settings = 109; + + // Optional link to proto reference documentation. Example: + // https://cloud.google.com/pubsub/lite/docs/reference/rpc + string proto_reference_documentation_uri = 110; + + // Optional link to REST reference documentation. Example: + // https://cloud.google.com/pubsub/lite/docs/reference/rest + string rest_reference_documentation_uri = 111; +} + +// Settings for Java client libraries. +message JavaSettings { + // The package name to use in Java. Clobbers the java_package option + // set in the protobuf. This should be used **only** by APIs + // who have already set the language_settings.java.package_name" field + // in gapic.yaml. API teams should use the protobuf java_package option + // where possible. + // + // Example of a YAML configuration:: + // + // publishing: + // java_settings: + // library_package: com.google.cloud.pubsub.v1 + string library_package = 1; + + // Configure the Java class name to use instead of the service's for its + // corresponding generated GAPIC client. Keys are fully-qualified + // service names as they appear in the protobuf (including the full + // the language_settings.java.interface_names" field in gapic.yaml. API + // teams should otherwise use the service name as it appears in the + // protobuf. + // + // Example of a YAML configuration:: + // + // publishing: + // java_settings: + // service_class_names: + // - google.pubsub.v1.Publisher: TopicAdmin + // - google.pubsub.v1.Subscriber: SubscriptionAdmin + map service_class_names = 2; + + // Some settings. + CommonLanguageSettings common = 3; +} + +// Settings for C++ client libraries. +message CppSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Php client libraries. +message PhpSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Python client libraries. 
+message PythonSettings { + // Experimental features to be included during client library generation. + // These fields will be deprecated once the feature graduates and is enabled + // by default. + message ExperimentalFeatures { + // Enables generation of asynchronous REST clients if `rest` transport is + // enabled. By default, asynchronous REST clients will not be generated. + // This feature will be enabled by default 1 month after launching the + // feature in preview packages. + bool rest_async_io_enabled = 1; + + // Enables generation of protobuf code using new types that are more + // Pythonic which are included in `protobuf>=5.29.x`. This feature will be + // enabled by default 1 month after launching the feature in preview + // packages. + bool protobuf_pythonic_types_enabled = 2; + } + + // Some settings. + CommonLanguageSettings common = 1; + + // Experimental features to be included during client library generation. + ExperimentalFeatures experimental_features = 2; +} + +// Settings for Node client libraries. +message NodeSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Dotnet client libraries. +message DotnetSettings { + // Some settings. + CommonLanguageSettings common = 1; + + // Map from original service names to renamed versions. + // This is used when the default generated types + // would cause a naming conflict. (Neither name is + // fully-qualified.) + // Example: Subscriber to SubscriberServiceApi. + map renamed_services = 2; + + // Map from full resource types to the effective short name + // for the resource. This is used when otherwise resource + // named from different services would cause naming collisions. + // Example entry: + // "datalabeling.googleapis.com/Dataset": "DataLabelingDataset" + map renamed_resources = 3; + + // List of full resource types to ignore during generation. 
+ // This is typically used for API-specific Location resources, + // which should be handled by the generator as if they were actually + // the common Location resources. + // Example entry: "documentai.googleapis.com/Location" + repeated string ignored_resources = 4; + + // Namespaces which must be aliased in snippets due to + // a known (but non-generator-predictable) naming collision + repeated string forced_namespace_aliases = 5; + + // Method signatures (in the form "service.method(signature)") + // which are provided separately, so shouldn't be generated. + // Snippets *calling* these methods are still generated, however. + repeated string handwritten_signatures = 6; +} + +// Settings for Ruby client libraries. +message RubySettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Settings for Go client libraries. +message GoSettings { + // Some settings. + CommonLanguageSettings common = 1; +} + +// Describes the generator configuration for a method. +message MethodSettings { + // Describes settings to use when generating API methods that use the + // long-running operation pattern. + // All default values below are from those used in the client library + // generators (e.g. + // [Java](https://github.com/googleapis/gapic-generator-java/blob/04c2faa191a9b5a10b92392fe8482279c4404803/src/main/java/com/google/api/generator/gapic/composer/common/RetrySettingsComposer.java)). + message LongRunning { + // Initial delay after which the first poll request will be made. + // Default value: 5 seconds. + google.protobuf.Duration initial_poll_delay = 1; + + // Multiplier to gradually increase delay between subsequent polls until it + // reaches max_poll_delay. + // Default value: 1.5. + float poll_delay_multiplier = 2; + + // Maximum time between two subsequent poll requests. + // Default value: 45 seconds. + google.protobuf.Duration max_poll_delay = 3; + + // Total polling timeout. + // Default value: 5 minutes. 
+ google.protobuf.Duration total_poll_timeout = 4; + } + + // The fully qualified name of the method, for which the options below apply. + // This is used to find the method to apply the options. + // + // Example: + // + // publishing: + // method_settings: + // - selector: google.storage.control.v2.StorageControl.CreateFolder + // # method settings for CreateFolder... + string selector = 1; + + // Describes settings to use for long-running operations when generating + // API methods for RPCs. Complements RPCs that use the annotations in + // google/longrunning/operations.proto. + // + // Example of a YAML configuration:: + // + // publishing: + // method_settings: + // - selector: google.cloud.speech.v2.Speech.BatchRecognize + // long_running: + // initial_poll_delay: 60s # 1 minute + // poll_delay_multiplier: 1.5 + // max_poll_delay: 360s # 6 minutes + // total_poll_timeout: 54000s # 90 minutes + LongRunning long_running = 2; + + // List of top-level fields of the request message, that should be + // automatically populated by the client libraries based on their + // (google.api.field_info).format. Currently supported format: UUID4. + // + // Example of a YAML configuration: + // + // publishing: + // method_settings: + // - selector: google.example.v1.ExampleService.CreateExample + // auto_populated_fields: + // - request_id + repeated string auto_populated_fields = 3; +} + +// The organization for which the client libraries are being published. +// Affects the url where generated docs are published, etc. +enum ClientLibraryOrganization { + // Not useful. + CLIENT_LIBRARY_ORGANIZATION_UNSPECIFIED = 0; + + // Google Cloud Platform Org. + CLOUD = 1; + + // Ads (Advertising) Org. + ADS = 2; + + // Photos Org. + PHOTOS = 3; + + // Street View Org. + STREET_VIEW = 4; + + // Shopping Org. + SHOPPING = 5; + + // Geo Org. + GEO = 6; + + // Generative AI - https://developers.generativeai.google + GENERATIVE_AI = 7; +} + +// To where should client libraries be published? 
+enum ClientLibraryDestination { + // Client libraries will neither be generated nor published to package + // managers. + CLIENT_LIBRARY_DESTINATION_UNSPECIFIED = 0; + + // Generate the client library in a repo under github.com/googleapis, + // but don't publish it to package managers. + GITHUB = 10; + + // Publish the library to package managers like nuget.org and npmjs.com. + PACKAGE_MANAGER = 20; +} + +// This message is used to configure the generation of a subset of the RPCs in +// a service for client libraries. +message SelectiveGapicGeneration { + // An allowlist of the fully qualified names of RPCs that should be included + // on public client surfaces. + repeated string methods = 1; +} diff --git a/.venv/lib/python3.11/site-packages/google/api/field_behavior.proto b/.venv/lib/python3.11/site-packages/google/api/field_behavior.proto new file mode 100644 index 0000000000000000000000000000000000000000..2865ba05373934fa0a9aa57ee9dd1533b0116d36 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/api/field_behavior.proto @@ -0,0 +1,104 @@ +// Copyright 2024 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.api; + +import "google/protobuf/descriptor.proto"; + +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "FieldBehaviorProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.FieldOptions { + // A designation of a specific field behavior (required, output only, etc.) + // in protobuf messages. + // + // Examples: + // + // string name = 1 [(google.api.field_behavior) = REQUIRED]; + // State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + // google.protobuf.Duration ttl = 1 + // [(google.api.field_behavior) = INPUT_ONLY]; + // google.protobuf.Timestamp expire_time = 1 + // [(google.api.field_behavior) = OUTPUT_ONLY, + // (google.api.field_behavior) = IMMUTABLE]; + repeated google.api.FieldBehavior field_behavior = 1052 [packed = false]; +} + +// An indicator of the behavior of a given field (for example, that a field +// is required in requests, or given as output but ignored as input). +// This **does not** change the behavior in protocol buffers itself; it only +// denotes the behavior and may affect how API tooling handles the field. +// +// Note: This enum **may** receive new values in the future. +enum FieldBehavior { + // Conventional default for enums. Do not use this. + FIELD_BEHAVIOR_UNSPECIFIED = 0; + + // Specifically denotes a field as optional. + // While all fields in protocol buffers are optional, this may be specified + // for emphasis if appropriate. + OPTIONAL = 1; + + // Denotes a field as required. + // This indicates that the field **must** be provided as part of the request, + // and failure to do so will cause an error (usually `INVALID_ARGUMENT`). + REQUIRED = 2; + + // Denotes a field as output only. 
+ // This indicates that the field is provided in responses, but including the + // field in a request does nothing (the server *must* ignore it and + // *must not* throw an error as a result of the field's presence). + OUTPUT_ONLY = 3; + + // Denotes a field as input only. + // This indicates that the field is provided in requests, and the + // corresponding field is not included in output. + INPUT_ONLY = 4; + + // Denotes a field as immutable. + // This indicates that the field may be set once in a request to create a + // resource, but may not be changed thereafter. + IMMUTABLE = 5; + + // Denotes that a (repeated) field is an unordered list. + // This indicates that the service may provide the elements of the list + // in any arbitrary order, rather than the order the user originally + // provided. Additionally, the list's order may or may not be stable. + UNORDERED_LIST = 6; + + // Denotes that this field returns a non-empty default value if not set. + // This indicates that if the user provides the empty value in a request, + // a non-empty value will be returned. The user will not be aware of what + // non-empty value to expect. + NON_EMPTY_DEFAULT = 7; + + // Denotes that the field in a resource (a message annotated with + // google.api.resource) is used in the resource name to uniquely identify the + // resource. For AIP-compliant APIs, this should only be applied to the + // `name` field on the resource. + // + // This behavior should not be applied to references to other resources within + // the message. + // + // The identifier field of resources often have different field behavior + // depending on the request it is embedded in (e.g. for Create methods name + // is optional and unused, while for Update methods it is required). Instead + // of method-specific annotations, only `IDENTIFIER` is required. 
+ IDENTIFIER = 8; +} diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc134337e3d5658c2a0f3751f58ff3772fecbab3 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/client.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/client.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85f0f93f1878fc831a4238435e22bbd483894c3a Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/client.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/discuss.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/discuss.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b5186f370fd82d03cd8ba1b91768cd96529d638b Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/discuss.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/protos.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/protos.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d4a23f7532aed9459a03bfa41515de5facb215e Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/protos.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/utils.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/utils.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..3283efa5c080845ce2e388411315e87dd66e8714 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/__pycache__/utils.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__init__.py b/.venv/lib/python3.11/site-packages/google/generativeai/types/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1e78537461f9271fb356cbad6ed866aa3dc31c08 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/generativeai/types/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""A collection of type definitions used throughout the library.""" + +from google.generativeai.types.citation_types import * +from google.generativeai.types.content_types import * +from google.generativeai.types.file_types import * +from google.generativeai.types.generation_types import * +from google.generativeai.types.helper_types import * +from google.generativeai.types.model_types import * +from google.generativeai.types.permission_types import * +from google.generativeai.types.safety_types import * + + +del model_types +del citation_types +del safety_types diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/citation_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/citation_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..91dd5237a49043fd2e381e7eef458facf9fcd7f7 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/citation_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/file_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/file_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7be7e42bfaf3be93f84bb7cd740bdb28b0872714 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/file_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/generation_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/generation_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ba89828e81cd0f6abd6bb2022917bb9232a8a6c Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/generation_types.cpython-311.pyc differ diff --git 
a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/palm_safety_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/palm_safety_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f438e3538c6f2d9ec2d25738c7b0f00095a57fdc Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/palm_safety_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/permission_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/permission_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9a54d3644f66aec503a32e4359feedf8e3c1224c Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/permission_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/text_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/text_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..45277d901e70f12f094ac79c86d0b59cb8d25c2b Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/generativeai/types/__pycache__/text_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/citation_types.py b/.venv/lib/python3.11/site-packages/google/generativeai/types/citation_types.py new file mode 100644 index 0000000000000000000000000000000000000000..9f169703fffabe58f4a688fe57bf342586628452 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/generativeai/types/citation_types.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations +from typing import List + +from typing_extensions import TypedDict + +from google.generativeai import protos +from google.generativeai import string_utils + + +__all__ = [ + "CitationMetadataDict", + "CitationSourceDict", +] + + +class CitationSourceDict(TypedDict): + start_index: int | None + end_index: int | None + uri: str | None + license: str | None + + __doc__ = string_utils.strip_oneof(protos.CitationSource.__doc__) + + +class CitationMetadataDict(TypedDict): + citation_sources: List[CitationSourceDict | None] + + __doc__ = string_utils.strip_oneof(protos.CitationMetadata.__doc__) diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/helper_types.py b/.venv/lib/python3.11/site-packages/google/generativeai/types/helper_types.py new file mode 100644 index 0000000000000000000000000000000000000000..fd8c1882b5fa172018d3ac1da1eaa954b61ac5c9 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/generativeai/types/helper_types.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import google.api_core.timeout +import google.api_core.retry + +import collections +import dataclasses + +from typing import Union +from typing_extensions import TypedDict + +__all__ = ["RequestOptions", "RequestOptionsType"] + + +class RequestOptionsDict(TypedDict, total=False): + retry: google.api_core.retry.Retry + timeout: Union[int, float, google.api_core.timeout.TimeToDeadlineTimeout] + + +@dataclasses.dataclass(init=False) +class RequestOptions(collections.abc.Mapping): + """Request options + + >>> import google.generativeai as genai + >>> from google.generativeai.types import RequestOptions + >>> from google.api_core import retry + >>> + >>> model = genai.GenerativeModel() + >>> response = model.generate_content('Hello', + ... request_options=RequestOptions( + ... retry=retry.Retry(initial=10, multiplier=2, maximum=60, timeout=300))) + >>> response = model.generate_content('Hello', + ... request_options=RequestOptions(timeout=600))) + + Args: + retry: Refer to [retry docs](https://googleapis.dev/python/google-api-core/latest/retry.html) for details. + timeout: In seconds (or provide a [TimeToDeadlineTimeout](https://googleapis.dev/python/google-api-core/latest/timeout.html) object). + """ + + retry: google.api_core.retry.Retry | None + timeout: int | float | google.api_core.timeout.TimeToDeadlineTimeout | None + + def __init__( + self, + *, + retry: google.api_core.retry.Retry | None = None, + timeout: int | float | google.api_core.timeout.TimeToDeadlineTimeout | None = None, + ): + self.retry = retry + self.timeout = timeout + + # Inherit from Mapping for **unpacking + def __getitem__(self, item): + if item == "retry": + return self.retry + elif item == "timeout": + return self.timeout + else: + raise KeyError( + f"Invalid key: 'RequestOptions' does not contain a key named '{item}'. " + "Please use a valid key." 
+ ) + + def __iter__(self): + yield "retry" + yield "timeout" + + def __len__(self): + return 2 + + +RequestOptionsType = Union[RequestOptions, RequestOptionsDict] diff --git a/.venv/lib/python3.11/site-packages/google/generativeai/types/retriever_types.py b/.venv/lib/python3.11/site-packages/google/generativeai/types/retriever_types.py new file mode 100644 index 0000000000000000000000000000000000000000..09d9060a8a3ba3559e5a3038e1bd533121c14216 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/generativeai/types/retriever_types.py @@ -0,0 +1,1698 @@ +# -*- coding: utf-8 -*- +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +import datetime +import re +import abc +import dataclasses +from typing import Any, AsyncIterable, Optional, Union, Iterable, Mapping +from typing_extensions import deprecated # type: ignore + +import google.ai.generativelanguage as glm +from google.generativeai import protos + +from google.protobuf import field_mask_pb2 +from google.generativeai.client import get_default_retriever_client +from google.generativeai.client import get_default_retriever_async_client +from google.generativeai import string_utils +from google.generativeai.types import helper_types + +from google.generativeai.types import permission_types +from google.generativeai.types.model_types import idecode_time +from google.generativeai.utils import flatten_update_paths + +_VALID_NAME = r"[a-z0-9]([a-z0-9-]{0,38}[a-z0-9])$" +NAME_ERROR_MSG = """The `name` must consist of alphanumeric characters (or -) and be 40 or fewer characters; or be empty. The name you entered: + len(name)== {length} + name={name} +""" + + +def valid_name(name): + return re.match(_VALID_NAME, name) and len(name) < 40 + + +Operator = protos.Condition.Operator +State = protos.Chunk.State + +OperatorOptions = Union[str, int, Operator] +StateOptions = Union[str, int, State] + +ChunkOptions = Union[ + protos.Chunk, + str, + tuple[str, str], + tuple[str, str, Any], + Mapping[str, Any], +] # fmt: no + +BatchCreateChunkOptions = Union[ + protos.BatchCreateChunksRequest, + Mapping[str, str], + Mapping[str, tuple[str, str]], + Iterable[ChunkOptions], +] # fmt: no + +UpdateChunkOptions = Union[protos.UpdateChunkRequest, Mapping[str, Any], tuple[str, Any]] + +BatchUpdateChunksOptions = Union[protos.BatchUpdateChunksRequest, Iterable[UpdateChunkOptions]] + +BatchDeleteChunkOptions = Union[list[protos.DeleteChunkRequest], Iterable[str]] + +_OPERATOR: dict[OperatorOptions, Operator] = { + Operator.OPERATOR_UNSPECIFIED: Operator.OPERATOR_UNSPECIFIED, + 0: Operator.OPERATOR_UNSPECIFIED, + 
"operator_unspecified": Operator.OPERATOR_UNSPECIFIED, + "unspecified": Operator.OPERATOR_UNSPECIFIED, + Operator.LESS: Operator.LESS, + 1: Operator.LESS, + "operator_less": Operator.LESS, + "less": Operator.LESS, + "<": Operator.LESS, + Operator.LESS_EQUAL: Operator.LESS_EQUAL, + 2: Operator.LESS_EQUAL, + "operator_less_equal": Operator.LESS_EQUAL, + "less_equal": Operator.LESS_EQUAL, + "<=": Operator.LESS_EQUAL, + Operator.EQUAL: Operator.EQUAL, + 3: Operator.EQUAL, + "operator_equal": Operator.EQUAL, + "equal": Operator.EQUAL, + "==": Operator.EQUAL, + Operator.GREATER_EQUAL: Operator.GREATER_EQUAL, + 4: Operator.GREATER_EQUAL, + "operator_greater_equal": Operator.GREATER_EQUAL, + "greater_equal": Operator.GREATER_EQUAL, + Operator.EQUAL: Operator.EQUAL, + 5: Operator.EQUAL, + "operator_equal": Operator.EQUAL, + "equal": Operator.EQUAL, + "==": Operator.EQUAL, + Operator.NOT_EQUAL: Operator.NOT_EQUAL, + 6: Operator.NOT_EQUAL, + "operator_not_equal": Operator.NOT_EQUAL, + "not_equal": Operator.NOT_EQUAL, + "!=": Operator.NOT_EQUAL, + Operator.INCLUDES: Operator.INCLUDES, + 7: Operator.INCLUDES, + "operator_includes": Operator.INCLUDES, + "includes": Operator.INCLUDES, + Operator.EXCLUDES: Operator.EXCLUDES, + 8: Operator.EXCLUDES, + "operator_excludes": Operator.EXCLUDES, + "excludes": Operator.EXCLUDES, + "not in": Operator.EXCLUDES, +} + +_STATE: dict[StateOptions, State] = { + State.STATE_UNSPECIFIED: State.STATE_UNSPECIFIED, + 0: State.STATE_UNSPECIFIED, + "state_unspecifed": State.STATE_UNSPECIFIED, + "unspecified": State.STATE_UNSPECIFIED, + State.STATE_PENDING_PROCESSING: State.STATE_PENDING_PROCESSING, + 1: State.STATE_PENDING_PROCESSING, + "pending_processing": State.STATE_PENDING_PROCESSING, + "pending": State.STATE_PENDING_PROCESSING, + State.STATE_ACTIVE: State.STATE_ACTIVE, + 2: State.STATE_ACTIVE, + "state_active": State.STATE_ACTIVE, + "active": State.STATE_ACTIVE, + State.STATE_FAILED: State.STATE_FAILED, + 10: State.STATE_FAILED, + 
"state_failed": State.STATE_FAILED, + "failed": State.STATE_FAILED, +} + + +def to_operator(x: OperatorOptions) -> Operator: + if isinstance(x, str): + x = x.lower() + return _OPERATOR[x] + + +def to_state(x: StateOptions) -> State: + if isinstance(x, str): + x = x.lower() + return _STATE[x] + + +@string_utils.prettyprint +@dataclasses.dataclass +class MetadataFilter: + key: str + conditions: Iterable[Condition] + + def _to_proto(self): + kwargs = {} + conditions = [] + for c in self.conditions: + if isinstance(c.value, str): + kwargs["string_value"] = c.value + elif isinstance(c.value, (int, float)): + kwargs["numeric_value"] = float(c.value) + else: + raise ValueError( + f"Invalid value type: The value for the condition must be either a string or an integer/float. Received: '{c.value}' of type {type(c.value).__name__}." + ) + kwargs["operation"] = c.operation + + condition = protos.Condition(**kwargs) + conditions.append(condition) + + return protos.MetadataFilter(key=self.key, conditions=conditions) + + +@string_utils.prettyprint +@dataclasses.dataclass +class Condition: + value: str | float + operation: Operator + + +@string_utils.prettyprint +@dataclasses.dataclass +class CustomMetadata: + key: str + value: str | Iterable[str] | float + + def _to_proto(self): + kwargs = {} + if isinstance(self.value, str): + kwargs["string_value"] = self.value + elif isinstance(self.value, Iterable): + if isinstance(self.value, Mapping): + # If already converted to a protos.StringList, get the values + kwargs["string_list_value"] = self.value + else: + kwargs["string_list_value"] = protos.StringList(values=self.value) + elif isinstance(self.value, (int, float)): + kwargs["numeric_value"] = float(self.value) + else: + raise ValueError( + f"Invalid value type: The value for a custom_metadata specification must be either a list of string values, a string, or an integer/float. Received: '{self.value}' of type {type(self.value).__name__}." 
+ ) + return protos.CustomMetadata(key=self.key, **kwargs) + + @classmethod + def _from_dict(cls, cm): + key = cm["key"] + value = ( + cm.get("value", None) + or cm.get("string_value", None) + or cm.get("string_list_value", None) + or cm.get("numeric_value", None) + ) + return cls(key=key, value=value) + + def _to_dict(self): + proto = self._to_proto() + return type(proto).to_dict(proto) + + +CustomMetadataOptions = Union[CustomMetadata, protos.CustomMetadata, dict] + + +def make_custom_metadata(cm: CustomMetadataOptions) -> CustomMetadata: + if isinstance(cm, CustomMetadata): + return cm + + if isinstance(cm, protos.CustomMetadata): + cm = type(cm).to_dict(cm) + + if isinstance(cm, dict): + return CustomMetadata._from_dict(cm) + else: + raise ValueError( # nofmt + f"Invalid input: Could not create a 'CustomMetadata' from the provided input. Received type: '{type(cm).__name__}', value: '{cm}'." + ) + + +@string_utils.prettyprint +@dataclasses.dataclass +class ChunkData: + string_value: str + + +@string_utils.prettyprint +@dataclasses.dataclass() +class Corpus: + """ + A `Corpus` is a collection of `Documents`. + """ + + name: str + display_name: str + create_time: datetime.datetime + update_time: datetime.datetime + + @property + def permissions(self) -> permission_types.Permissions: + return permission_types.Permissions(self) + + def create_document( + self, + name: str | None = None, + display_name: str | None = None, + custom_metadata: Iterable[CustomMetadata] | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Document: + """ + Request to create a `Document`. + + Args: + name: The `Document` resource name. The ID (name excluding the "corpora/*/documents/" prefix) can contain up to 40 characters + that are lowercase alphanumeric or dashes (-). The ID cannot start or end with a dash. + display_name: The human-readable display name for the `Document`. 
+ custom_metadata: User provided custom metadata stored as key-value pairs used for querying. + request_options: Options for the request. + + Return: + Document object with specified name or display name. + + Raises: + ValueError: When the name is not specified or formatted incorrectly. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + # Handle the custom_metadata parameter + c_data = [] + if custom_metadata: + for cm in custom_metadata: + c_data.append(cm._to_proto()) + + if name is None: + document = protos.Document(display_name=display_name, custom_metadata=c_data) + elif valid_name(name): + document = protos.Document( + name=f"{self.name}/documents/{name}", + display_name=display_name, + custom_metadata=c_data, + ) + else: + raise ValueError(NAME_ERROR_MSG.format(length=len(name), name=name)) + + request = protos.CreateDocumentRequest(parent=self.name, document=document) + response = client.create_document(request, **request_options) + return decode_document(response) + + async def create_document_async( + self, + name: str | None = None, + display_name: str | None = None, + custom_metadata: Iterable[CustomMetadata] | None = None, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Document: + """This is the async version of `Corpus.create_document`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + # Handle the custom_metadata parameter + c_data = [] + if custom_metadata: + for cm in custom_metadata: + c_data.append(cm._to_proto()) + + if name is None: + document = protos.Document(display_name=display_name, custom_metadata=c_data) + elif valid_name(name): + document = protos.Document( + name=f"{self.name}/documents/{name}", + display_name=display_name, + custom_metadata=c_data, + ) + else: + raise 
ValueError(NAME_ERROR_MSG.format(length=len(name), name=name)) + + request = protos.CreateDocumentRequest(parent=self.name, document=document) + response = await client.create_document(request, **request_options) + return decode_document(response) + + def get_document( + self, + name: str, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Document: + """ + Get information about a specific `Document`. + + Args: + name: The `Document` name. + request_options: Options for the request. + + Return: + `Document` of interest. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if "/" not in name: + name = f"{self.name}/documents/{name}" + + request = protos.GetDocumentRequest(name=name) + response = client.get_document(request, **request_options) + return decode_document(response) + + async def get_document_async( + self, + name: str, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Document: + """This is the async version of `Corpus.get_document`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if "/" not in name: + name = f"{self.name}/documents/{name}" + + request = protos.GetDocumentRequest(name=name) + response = await client.get_document(request, **request_options) + return decode_document(response) + + def _apply_update(self, path, value): + parts = path.split(".") + for part in parts[:-1]: + self = getattr(self, part) + setattr(self, parts[-1], value) + + def update( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Update a list of fields for a specified `Corpus`. + + Args: + updates: List of fields to update in a `Corpus`. 
+ request_options: Options for the request. + + Return: + Updated version of the `Corpus` object. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + updates = flatten_update_paths(updates) + # At this time, only `display_name` can be updated + for item in updates: + if item != "display_name": + raise ValueError( + "Invalid operation: Currently, only the 'display_name' attribute can be updated for a 'Corpus'." + ) + field_mask = field_mask_pb2.FieldMask() + + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + + request = protos.UpdateCorpusRequest(corpus=self.to_dict(), update_mask=field_mask) + client.update_corpus(request, **request_options) + return self + + async def update_async( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Corpus.update`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + updates = flatten_update_paths(updates) + # At this time, only `display_name` can be updated + for item in updates: + if item != "display_name": + raise ValueError( + "Invalid operation: Currently, only the 'display_name' attribute can be updated for a 'Corpus'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + + request = protos.UpdateCorpusRequest(corpus=self.to_dict(), update_mask=field_mask) + await client.update_corpus(request, **request_options) + return self + + def query( + self, + query: str, + metadata_filters: Iterable[MetadataFilter] | None = None, + results_count: int | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Iterable[RelevantChunk]: + """ + Query a corpus for information. + + Args: + query: Query string to perform semantic search. + metadata_filters: Filter for `Chunk` metadata. + results_count: The maximum number of `Chunk`s to return; must be less than 100. + request_options: Options for the request. + + Returns: + List of relevant chunks. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if results_count: + if results_count > 100: + raise ValueError( + "Invalid operation: The number of results returned must be between 1 and 100." 
+ ) + + m_f_ = [] + if metadata_filters: + for mf in metadata_filters: + m_f_.append(mf._to_proto()) + + request = protos.QueryCorpusRequest( + name=self.name, + query=query, + metadata_filters=m_f_, + results_count=results_count, + ) + response = client.query_corpus(request, **request_options) + response = type(response).to_dict(response) + + # Create a RelevantChunk object for each chunk listed in response['relevant_chunks'] + relevant_chunks = [] + for c in response["relevant_chunks"]: + rc = RelevantChunk( + chunk_relevance_score=c["chunk_relevance_score"], chunk=Chunk(**c["chunk"]) + ) + relevant_chunks.append(rc) + + return relevant_chunks + + async def query_async( + self, + query: str, + metadata_filters: Iterable[MetadataFilter] | None = None, + results_count: int | None = None, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Iterable[RelevantChunk]: + """This is the async version of `Corpus.query`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if results_count: + if results_count > 100: + raise ValueError( + "Invalid operation: The number of results returned must be between 1 and 100." 
+ ) + + m_f_ = [] + if metadata_filters: + for mf in metadata_filters: + m_f_.append(mf._to_proto()) + + request = protos.QueryCorpusRequest( + name=self.name, + query=query, + metadata_filters=m_f_, + results_count=results_count, + ) + response = await client.query_corpus(request, **request_options) + response = type(response).to_dict(response) + + # Create a RelevantChunk object for each chunk listed in response['relevant_chunks'] + relevant_chunks = [] + for c in response["relevant_chunks"]: + rc = RelevantChunk( + chunk_relevance_score=c["chunk_relevance_score"], chunk=Chunk(**c["chunk"]) + ) + relevant_chunks.append(rc) + + return relevant_chunks + + def delete_document( + self, + name: str, + force: bool = False, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Delete a document in the corpus. + + Args: + name: The `Document` name. + force: If set to true, any `Chunk`s and objects related to this `Document` will also be deleted. + request_options: Options for the request. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if "/" not in name: + name = f"{self.name}/documents/{name}" + + request = protos.DeleteDocumentRequest(name=name, force=bool(force)) + client.delete_document(request, **request_options) + + async def delete_document_async( + self, + name: str, + force: bool = False, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Corpus.delete_document`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if "/" not in name: + name = f"{self.name}/documents/{name}" + + request = protos.DeleteDocumentRequest(name=name, force=bool(force)) + await client.delete_document(request, **request_options) + + def list_documents( + self, + page_size: int | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Iterable[Document]: + """ + List documents in corpus. + + Args: + name: The name of the `Corpus` containing `Document`s. + page_size: The maximum number of `Document`s to return (per page). The service may return fewer `Document`s. + request_options: Options for the request. + + Return: + Paginated list of `Document`s. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + request = protos.ListDocumentsRequest( + parent=self.name, + page_size=page_size, + ) + for doc in client.list_documents(request, **request_options): + yield decode_document(doc) + + async def list_documents_async( + self, + page_size: int | None = None, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> AsyncIterable[Document]: + """This is the async version of `Corpus.list_documents`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + request = protos.ListDocumentsRequest( + parent=self.name, + page_size=page_size, + ) + async for doc in await client.list_documents(request, **request_options): + yield decode_document(doc) + + # PERMISSIONS STUBS: ..deprecated:: >0.5.2 + @deprecated( + "`Corpus.create_permission` is deprecated and will be removed in a future release. \ + Corpus permissions are now managed using the `permissions` property. Use `Corpus.permissions.create` instead." + ) + def create_permission( + self, + role: permission_types.RoleOptions, + grantee_type: Optional[permission_types.GranteeTypeOptions] = None, + email_address: Optional[str] = None, + client: glm.PermissionServiceClient | None = None, + ) -> permission_types.Permission: + return self.permissions.create( + role=role, grantee_type=grantee_type, email_address=email_address, client=client + ) + + @deprecated( + "`Corpus.create_permission_async` is deprecated and will be removed in a future release. \ + Corpus permissions are now managed using the `permissions` property. Use `Corpus.permissions.create_async` instead." 
+ ) + async def create_permission_async( + self, + role: permission_types.RoleOptions, + grantee_type: Optional[permission_types.GranteeTypeOptions] = None, + email_address: Optional[str] = None, + client: glm.PermissionServiceAsyncClient | None = None, + ) -> permission_types.Permission: + return await self.permissions.create_async( + role=role, grantee_type=grantee_type, email_address=email_address, client=client + ) + + @deprecated( + "`Corpus.list_permission` is deprecated and will be removed in a future release. \ + Corpus permissions are now managed using the `permissions` property. Use `Corpus.permissions.list` instead." + ) + def list_permissions( + self, + page_size: Optional[int] = None, + client: glm.PermissionServiceClient | None = None, + ) -> Iterable[permission_types.Permission]: + return self.permissions.list(page_size=page_size, client=client) + + @deprecated( + "`Corpus.list_permission_async` is deprecated and will be removed in a future release. \ + Corpus permissions are now managed using the `permissions` property. Use `Corpus.permissions.list_async` instead." + ) + async def list_permissions_async( + self, + page_size: Optional[int] = None, + client: glm.PermissionServiceAsyncClient | None = None, + ) -> AsyncIterable[permission_types.Permission]: + return self.permissions.list_async(page_size=page_size, client=client) + + # PERMISSIONS STUBS END + + def to_dict(self) -> dict[str, Any]: + result = {"name": self.name, "display_name": self.display_name} + return result + + +def decode_document(document): + document = type(document).to_dict(document) + idecode_time(document, "create_time") + idecode_time(document, "update_time") + return Document(**document) + + +@string_utils.prettyprint +@dataclasses.dataclass() +class Document(abc.ABC): + """ + A `Document` is a collection of `Chunk`s. 
+ """ + + name: str + display_name: str + custom_metadata: list[CustomMetadata] + create_time: datetime.datetime + update_time: datetime.datetime + + def create_chunk( + self, + data: str | ChunkData, + name: str | None = None, + custom_metadata: Iterable[CustomMetadata] | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Chunk: + """ + Create a `Chunk` object which has textual data. + + Args: + data: The content for the `Chunk`, such as the text string. + name: The `Chunk` resource name. The ID (name excluding the "corpora/*/documents/*/chunks/" prefix) can contain up to 40 characters that are lowercase alphanumeric or dashes (-). + custom_metadata: User provided custom metadata stored as key-value pairs. + state: States for the lifecycle of a `Chunk`. + request_options: Options for the request. + + Return: + `Chunk` object with specified data. + + Raises: + ValueError when chunk name not specified correctly. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + # Handle the custom_metadata parameter + c_data = [] + if custom_metadata: + for cm in custom_metadata: + c_data.append(cm._to_proto()) + + if name is not None: + if valid_name(name): + chunk_name = f"{self.name}/chunks/{name}" + else: + raise ValueError(NAME_ERROR_MSG.format(length=len(name), name=name)) + else: + chunk_name = name + + if isinstance(data, str): + chunk = protos.Chunk( + name=chunk_name, data={"string_value": data}, custom_metadata=c_data + ) + else: + chunk = protos.Chunk( + name=chunk_name, + data={"string_value": data.string_value}, + custom_metadata=c_data, + ) + + request = protos.CreateChunkRequest(parent=self.name, chunk=chunk) + response = client.create_chunk(request, **request_options) + return decode_chunk(response) + + async def create_chunk_async( + self, + data: str | ChunkData, + name: str | None = None, + custom_metadata: Iterable[CustomMetadata] | None = None, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Chunk: + """This is the async version of `Document.create_chunk`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + # Handle the custom_metadata parameter + c_data = [] + if custom_metadata: + for cm in custom_metadata: + c_data.append(cm._to_proto()) + + if name is not None: + if valid_name(name): + chunk_name = f"{self.name}/chunks/{name}" + else: + raise ValueError(NAME_ERROR_MSG.format(length=len(name), name=name)) + else: + chunk_name = name + + if isinstance(data, str): + chunk = protos.Chunk( + name=chunk_name, data={"string_value": data}, custom_metadata=c_data + ) + else: + chunk = protos.Chunk( + name=chunk_name, + data={"string_value": data.string_value}, + custom_metadata=c_data, + ) + + request = 
protos.CreateChunkRequest(parent=self.name, chunk=chunk) + response = await client.create_chunk(request, **request_options) + return decode_chunk(response) + + def _make_chunk(self, chunk: ChunkOptions) -> protos.Chunk: + # del self + if isinstance(chunk, protos.Chunk): + return protos.Chunk(chunk) + elif isinstance(chunk, str): + return protos.Chunk(data={"string_value": chunk}) + elif isinstance(chunk, tuple): + if len(chunk) == 2: + name, data = chunk # pytype: disable=bad-unpacking + custom_metadata = None + elif len(chunk) == 3: + name, data, custom_metadata = chunk # pytype: disable=bad-unpacking + else: + raise ValueError( + f"Tuples should have length 2 or 3, got length: {len(chunk)}\n" + f"value: {chunk}" + ) + + return protos.Chunk( + name=name, + data={"string_value": data}, + custom_metadata=custom_metadata, + ) + elif isinstance(chunk, Mapping): + if isinstance(chunk["data"], str): + chunk = dict(chunk) + chunk["data"] = {"string_value": chunk["data"]} + return protos.Chunk(chunk) + else: + raise TypeError( + f"Invalid input: Could not convert instance of type '{type(chunk).__name__}' to a chunk. Received value: '{chunk}'." 
+ ) + + def _make_batch_create_chunk_request( + self, chunks: BatchCreateChunkOptions + ) -> protos.BatchCreateChunksRequest: + if isinstance(chunks, protos.BatchCreateChunksRequest): + return chunks + + if isinstance(chunks, Mapping): + chunks = chunks.items() + chunks = ( + # Flatten tuples + (key,) + value if isinstance(value, tuple) else (key, value) + for key, value in chunks + ) + + requests = [] + for i, chunk in enumerate(chunks): + chunk = self._make_chunk(chunk) + if chunk.name == "": + chunk.name = str(i) + + chunk.name = f"{self.name}/chunks/{chunk.name}" + + requests.append(protos.CreateChunkRequest(parent=self.name, chunk=chunk)) + + return protos.BatchCreateChunksRequest(parent=self.name, requests=requests) + + def batch_create_chunks( + self, + chunks: BatchCreateChunkOptions, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Create chunks within the given document. + + Args: + chunks: `Chunks` to create. + request_options: Options for the request. + + Return: + Information about the created chunks. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + request = self._make_batch_create_chunk_request(chunks) + response = client.batch_create_chunks(request, **request_options) + return [decode_chunk(chunk) for chunk in response.chunks] + + async def batch_create_chunks_async( + self, + chunks: BatchCreateChunkOptions, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Document.batch_create_chunk`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + request = self._make_batch_create_chunk_request(chunks) + response = await client.batch_create_chunks(request, **request_options) + return [decode_chunk(chunk) for chunk in response.chunks] + + def get_chunk( + self, + name: str, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Get information about a specific chunk. + + Args: + name: Name of `Chunk`. + request_options: Options for the request. + + Returns: + `Chunk` that was requested. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if "/" not in name: + name = f"{self.name}/chunks/{name}" + + request = protos.GetChunkRequest(name=name) + response = client.get_chunk(request, **request_options) + return decode_chunk(response) + + async def get_chunk_async( + self, + name: str, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Document.get_chunk`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if "/" not in name: + name = f"{self.name}/chunks/{name}" + + request = protos.GetChunkRequest(name=name) + response = await client.get_chunk(request, **request_options) + return decode_chunk(response) + + def list_chunks( + self, + page_size: int | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> Iterable[Chunk]: + """ + List chunks of a document. + + Args: + page_size: Maximum number of `Chunk`s to request. + request_options: Options for the request. + + Return: + List of chunks in the document. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + request = protos.ListChunksRequest(parent=self.name, page_size=page_size) + for chunk in client.list_chunks(request, **request_options): + yield decode_chunk(chunk) + + async def list_chunks_async( + self, + page_size: int | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> AsyncIterable[Chunk]: + """This is the async version of `Document.list_chunks`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + request = protos.ListChunksRequest(parent=self.name, page_size=page_size) + async for chunk in await client.list_chunks(request, **request_options): + yield decode_chunk(chunk) + + def query( + self, + query: str, + metadata_filters: Iterable[MetadataFilter] | None = None, + results_count: int | None = None, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> list[RelevantChunk]: + """ + Query a `Document` in the `Corpus` for information. + + Args: + query: Query string to perform semantic search. + metadata_filters: Filter for `Chunk` metadata. + results_count: The maximum number of `Chunk`s to return. + + Returns: + List of relevant chunks. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if results_count: + if results_count < 0 or results_count >= 100: + raise ValueError( + "Invalid operation: The number of results returned must be between 1 and 100." 
+ ) + + m_f_ = [] + if metadata_filters: + for mf in metadata_filters: + m_f_.append(mf._to_proto()) + + request = protos.QueryDocumentRequest( + name=self.name, + query=query, + metadata_filters=m_f_, + results_count=results_count, + ) + response = client.query_document(request, **request_options) + response = type(response).to_dict(response) + + # Create a RelevantChunk object for each chunk listed in response['relevant_chunks'] + relevant_chunks = [] + for c in response["relevant_chunks"]: + rc = RelevantChunk( + chunk_relevance_score=c["chunk_relevance_score"], chunk=Chunk(**c["chunk"]) + ) + relevant_chunks.append(rc) + + return relevant_chunks + + async def query_async( + self, + query: str, + metadata_filters: Iterable[MetadataFilter] | None = None, + results_count: int | None = None, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ) -> list[RelevantChunk]: + """This is the async version of `Document.query`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if results_count: + if results_count < 0 or results_count >= 100: + raise ValueError( + "Invalid operation: The number of results returned must be between 1 and 100." 
+ ) + + m_f_ = [] + if metadata_filters: + for mf in metadata_filters: + m_f_.append(mf._to_proto()) + + request = protos.QueryDocumentRequest( + name=self.name, + query=query, + metadata_filters=m_f_, + results_count=results_count, + ) + response = await client.query_document(request, **request_options) + response = type(response).to_dict(response) + + # Create a RelevantChunk object for each chunk listed in response['relevant_chunks'] + relevant_chunks = [] + for c in response["relevant_chunks"]: + rc = RelevantChunk( + chunk_relevance_score=c["chunk_relevance_score"], chunk=Chunk(**c["chunk"]) + ) + relevant_chunks.append(rc) + + return relevant_chunks + + def _apply_update(self, path, value): + parts = path.split(".") + for part in parts[:-1]: + self = getattr(self, part) + setattr(self, parts[-1], value) + + def update( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Update a list of fields for a specified document. + + Args: + updates: The list of fields to update. + request_options: Options for the request. + + Return: + `Chunk` object with specified updates. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + updates = flatten_update_paths(updates) + # At this time, only `display_name` can be updated + for item in updates: + if item != "display_name": + raise ValueError( + "Invalid operation: Currently, only the 'display_name' attribute can be updated for a 'Document'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + + request = protos.UpdateDocumentRequest(document=self.to_dict(), update_mask=field_mask) + client.update_document(request, **request_options) + return self + + async def update_async( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Document.update`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + updates = flatten_update_paths(updates) + # At this time, only `display_name` can be updated + for item in updates: + if item != "display_name": + raise ValueError( + "Invalid operation: Currently, only the 'display_name' attribute can be updated for a 'Document'." + ) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + + request = protos.UpdateDocumentRequest(document=self.to_dict(), update_mask=field_mask) + await client.update_document(request, **request_options) + return self + + def batch_update_chunks( + self, + chunks: BatchUpdateChunksOptions, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Update multiple chunks within the same document. + + Args: + chunks: Data structure specifying which `Chunk`s to update and what the required updates are. + request_options: Options for the request. + + Return: + Updated `Chunk`s. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if isinstance(chunks, protos.BatchUpdateChunksRequest): + response = client.batch_update_chunks(chunks) + response = type(response).to_dict(response) + return response + + _requests = [] + if isinstance(chunks, Mapping): + # Key is name of chunk, value is a dictionary of updates + for key, value in chunks.items(): + chunk_to_update = self.get_chunk(name=key) + + # Handle the custom_metadata parameter + c_data = [] + if chunk_to_update.custom_metadata: + for cm in chunk_to_update.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + chunk_to_update.custom_metadata = c_data + + updates = flatten_update_paths(value) + # At this time, only `data` can be updated + for item in updates: + if item != "data.string_value": + raise ValueError( + f"Invalid operation: Currently, only the 'data' attribute can be updated for a 'Chunk'. Attempted to update '{item}'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + chunk_to_update._apply_update(path, value) + _requests.append( + protos.UpdateChunkRequest( + chunk=chunk_to_update.to_dict(), update_mask=field_mask + ) + ) + request = protos.BatchUpdateChunksRequest(parent=self.name, requests=_requests) + response = client.batch_update_chunks(request, **request_options) + response = type(response).to_dict(response) + return response + if isinstance(chunks, Iterable) and not isinstance(chunks, Mapping): + for chunk in chunks: + if isinstance(chunk, protos.UpdateChunkRequest): + _requests.append(chunk) + elif isinstance(chunk, tuple): + # First element is name of chunk, second element contains updates + chunk_to_update = self.get_chunk(name=chunk[0]) + + # Handle the custom_metadata parameter + c_data = [] + if chunk_to_update.custom_metadata: + for cm in chunk_to_update.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + chunk_to_update.custom_metadata = c_data + + updates = flatten_update_paths(chunk[1]) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + chunk_to_update._apply_update(path, value) + _requests.append( + {"chunk": chunk_to_update.to_dict(), "update_mask": field_mask} + ) + else: + raise TypeError( + "Invalid input: The 'chunks' parameter must be a list of 'protos.UpdateChunkRequests'," + " dictionaries, or tuples of dictionaries." 
+ ) + request = protos.BatchUpdateChunksRequest(parent=self.name, requests=_requests) + response = client.batch_update_chunks(request, **request_options) + response = type(response).to_dict(response) + return response + + async def batch_update_chunks_async( + self, + chunks: BatchUpdateChunksOptions, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Document.batch_update_chunks`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if isinstance(chunks, protos.BatchUpdateChunksRequest): + response = client.batch_update_chunks(chunks) + response = type(response).to_dict(response) + return response + + _requests = [] + if isinstance(chunks, Mapping): + # Key is name of chunk, value is a dictionary of updates + for key, value in chunks.items(): + chunk_to_update = self.get_chunk(name=key) + + # Handle the custom_metadata parameter + c_data = [] + if chunk_to_update.custom_metadata: + for cm in chunk_to_update.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + chunk_to_update.custom_metadata = c_data + + updates = flatten_update_paths(value) + # At this time, only `data` can be updated + for item in updates: + if item != "data.string_value": + raise ValueError( + f"Invalid operation: Currently, only the 'data' attribute can be updated for a 'Chunk'. Attempted to update '{item}'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + chunk_to_update._apply_update(path, value) + _requests.append( + protos.UpdateChunkRequest( + chunk=chunk_to_update.to_dict(), update_mask=field_mask + ) + ) + request = protos.BatchUpdateChunksRequest(parent=self.name, requests=_requests) + response = await client.batch_update_chunks(request, **request_options) + response = type(response).to_dict(response) + return response + if isinstance(chunks, Iterable) and not isinstance(chunks, Mapping): + for chunk in chunks: + if isinstance(chunk, protos.UpdateChunkRequest): + _requests.append(chunk) + elif isinstance(chunk, tuple): + # First element is name of chunk, second element contains updates + chunk_to_update = self.get_chunk(name=chunk[0]) + + # Handle the custom_metadata parameter + c_data = [] + if chunk_to_update.custom_metadata: + for cm in chunk_to_update.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + chunk_to_update.custom_metadata = c_data + + updates = flatten_update_paths(chunk[1]) + field_mask = field_mask_pb2.FieldMask() + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + chunk_to_update._apply_update(path, value) + _requests.append( + {"chunk": chunk_to_update.to_dict(), "update_mask": field_mask} + ) + else: + raise TypeError( + "Invalid input: The 'chunks' parameter must be a list of 'protos.UpdateChunkRequests', " + "dictionaries, or tuples of dictionaries." 
+ ) + request = protos.BatchUpdateChunksRequest(parent=self.name, requests=_requests) + response = await client.batch_update_chunks(request, **request_options) + response = type(response).to_dict(response) + return response + + def delete_chunk( + self, + name: str, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, # fmt: {} + ): + """ + Delete a `Chunk`. + + Args: + name: The `Chunk` name. + request_options: Options for the request. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if "/" not in name: + name = f"{self.name}/chunks/{name}" + + request = protos.DeleteChunkRequest(name=name) + client.delete_chunk(request, **request_options) + + async def delete_chunk_async( + self, + name: str, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, # fmt: {} + ): + """This is the async version of `Document.delete_chunk`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if "/" not in name: + name = f"{self.name}/chunks/{name}" + + request = protos.DeleteChunkRequest(name=name) + await client.delete_chunk(request, **request_options) + + def batch_delete_chunks( + self, + chunks: BatchDeleteChunkOptions, + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Delete multiple `Chunk`s from a document. + + Args: + chunks: Names of `Chunks` to delete. + request_options: Options for the request. 
+ """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + if all(isinstance(x, protos.DeleteChunkRequest) for x in chunks): + request = protos.BatchDeleteChunksRequest(parent=self.name, requests=chunks) + client.batch_delete_chunks(request, **request_options) + elif isinstance(chunks, Iterable): + _request_list = [] + for chunk_name in chunks: + _request_list.append(protos.DeleteChunkRequest(name=chunk_name)) + request = protos.BatchDeleteChunksRequest(parent=self.name, requests=_request_list) + client.batch_delete_chunks(request, **request_options) + else: + raise ValueError( + "Invalid operation: To delete chunks, you must pass in either the names of the chunks as an iterable, " + "or multiple 'protos.DeleteChunkRequest's." + ) + + async def batch_delete_chunks_async( + self, + chunks: BatchDeleteChunkOptions, + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Document.batch_delete_chunks`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + if all(isinstance(x, protos.DeleteChunkRequest) for x in chunks): + request = protos.BatchDeleteChunksRequest(parent=self.name, requests=chunks) + await client.batch_delete_chunks(request, **request_options) + elif isinstance(chunks, Iterable): + _request_list = [] + for chunk_name in chunks: + _request_list.append(protos.DeleteChunkRequest(name=chunk_name)) + request = protos.BatchDeleteChunksRequest(parent=self.name, requests=_request_list) + await client.batch_delete_chunks(request, **request_options) + else: + raise ValueError( + "Invalid operation: To delete chunks, you must pass in either the names of the chunks as an iterable, " + "or multiple 'protos.DeleteChunkRequest's." 
+ ) + + def to_dict(self) -> dict[str, Any]: + result = { + "name": self.name, + "display_name": self.display_name, + "custom_metadata": self.custom_metadata, + } + return result + + +def decode_chunk(chunk: protos.Chunk) -> Chunk: + chunk = type(chunk).to_dict(chunk) + idecode_time(chunk, "create_time") + idecode_time(chunk, "update_time") + return Chunk(**chunk) + + +@string_utils.prettyprint +@dataclasses.dataclass +class RelevantChunk: + chunk_relevance_score: float + chunk: Chunk + + +@string_utils.prettyprint +@dataclasses.dataclass(init=False) +class Chunk(abc.ABC): + """ + A `Chunk` is part of the `Document`, or the actual text. + """ + + name: str + data: ChunkData + custom_metadata: list[CustomMetadata] | None + state: State + create_time: datetime.datetime | None + update_time: datetime.datetime | None + + def __init__( + self, + name: str, + data: ChunkData | str, + custom_metadata: Iterable[CustomMetadata] | None, + state: State, + create_time: datetime.datetime | str | None = None, + update_time: datetime.datetime | str | None = None, + ): + self.name = name + if isinstance(data, str): + self.data = ChunkData(string_value=data) + elif isinstance(data, dict): + self.data = ChunkData(string_value=data["string_value"]) + + if custom_metadata is None: + self.custom_metadata = [] + else: + self.custom_metadata = [make_custom_metadata(cm) for cm in custom_metadata] + + self.state = to_state(state) + + if create_time is None: + self.create_time = None + elif isinstance(create_time, datetime.datetime): + self.create_time = create_time + else: + self.create_time = datetime.datetime.strptime(create_time, "%Y-%m-%dT%H:%M:%S.%fZ") + + if update_time is None: + self.update_time = None + elif isinstance(update_time, datetime.datetime): + self.update_time = update_time + else: + self.update_time = datetime.datetime.strptime(update_time, "%Y-%m-%dT%H:%M:%S.%fZ") + + def _apply_update(self, path, value): + parts = path.split(".") + for part in parts[:-1]: + self = 
getattr(self, part) + setattr(self, parts[-1], value) + + def update( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """ + Update a list of fields for a specified `Chunk`. + + Args: + updates: List of fields to update for a `Chunk`. + request_options: Options for the request. + + Return: + Updated `Chunk` object. + """ + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_client() + + # Handle the custom_metadata parameter + c_data = [] + if self.custom_metadata: + for cm in self.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + self.custom_metadata = c_data + + updates = flatten_update_paths(updates) + # At this time, only `data` can be updated + for item in updates: + if item != "data.string_value": + raise ValueError( + f"Invalid operation: Currently, only the 'data' attribute can be updated for a 'Chunk'. Attempted to update '{item}'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + request = protos.UpdateChunkRequest(chunk=self.to_dict(), update_mask=field_mask) + + client.update_chunk(request, **request_options) + return self + + async def update_async( + self, + updates: dict[str, Any], + client: glm.RetrieverServiceAsyncClient | None = None, + request_options: helper_types.RequestOptionsType | None = None, + ): + """This is the async version of `Chunk.update`.""" + if request_options is None: + request_options = {} + + if client is None: + client = get_default_retriever_async_client() + + # Handle the custom_metadata parameter + c_data = [] + if self.custom_metadata: + for cm in self.custom_metadata: + c_data.append(cm._to_proto()) + + # When handling updates, use to the _to_proto result of the custom_metadata + self.custom_metadata = c_data + + updates = flatten_update_paths(updates) + # At this time, only `data` can be updated + for item in updates: + if item != "data.string_value": + raise ValueError( + f"Invalid operation: Currently, only the 'data' attribute can be updated for a 'Chunk'. Attempted to update '{item}'." 
+ ) + field_mask = field_mask_pb2.FieldMask() + + for path in updates.keys(): + field_mask.paths.append(path) + for path, value in updates.items(): + self._apply_update(path, value) + request = protos.UpdateChunkRequest(chunk=self.to_dict(), update_mask=field_mask) + + await client.update_chunk(request, **request_options) + return self + + def to_dict(self) -> dict[str, Any]: + result = { + "name": self.name, + "data": dataclasses.asdict(self.data), + "custom_metadata": [cm._to_dict() for cm in self.custom_metadata], + "state": self.state, + } + return result diff --git a/.venv/lib/python3.11/site-packages/google/logging/type/http_request_pb2.py b/.venv/lib/python3.11/site-packages/google/logging/type/http_request_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..0a8f0bbcc69cadde1292a57db99e5d2e9e19392b --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/logging/type/http_request_pb2.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/logging/type/http_request.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n&google/logging/type/http_request.proto\x12\x13google.logging.type\x1a\x1egoogle/protobuf/duration.proto"\xef\x02\n\x0bHttpRequest\x12\x16\n\x0erequest_method\x18\x01 \x01(\t\x12\x13\n\x0brequest_url\x18\x02 \x01(\t\x12\x14\n\x0crequest_size\x18\x03 \x01(\x03\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x15\n\rresponse_size\x18\x05 \x01(\x03\x12\x12\n\nuser_agent\x18\x06 \x01(\t\x12\x11\n\tremote_ip\x18\x07 \x01(\t\x12\x11\n\tserver_ip\x18\r \x01(\t\x12\x0f\n\x07referer\x18\x08 \x01(\t\x12*\n\x07latency\x18\x0e \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x63\x61\x63he_lookup\x18\x0b \x01(\x08\x12\x11\n\tcache_hit\x18\t \x01(\x08\x12*\n"cache_validated_with_origin_server\x18\n \x01(\x08\x12\x18\n\x10\x63\x61\x63he_fill_bytes\x18\x0c \x01(\x03\x12\x10\n\x08protocol\x18\x0f \x01(\tB\xbe\x01\n\x17\x63om.google.logging.typeB\x10HttpRequestProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Type\xea\x02\x1cGoogle::Cloud::Logging::Typeb\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.logging.type.http_request_pb2", _globals +) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = 
b"\n\027com.google.logging.typeB\020HttpRequestProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type\352\002\034Google::Cloud::Logging::Type" + _globals["_HTTPREQUEST"]._serialized_start = 96 + _globals["_HTTPREQUEST"]._serialized_end = 463 +# @@protoc_insertion_point(module_scope) diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48fe260a62951c1a8c7e56308b071022beb0b16c Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..194d48a0b5142f8cfa633f5f494c8d49e54c10ea Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d223e1f04370ba2e39e9ef12695fb55639daf49 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/any_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/api_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/api_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2361e8f93e1ca276648772848c271dd723100668 Binary files /dev/null and 
b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/api_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d6ebd3ef9646316a8444dc278ab4d50dc436ea14 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_database.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a175e9a3bd5dead4822a24dec04d63482649d756 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/descriptor_pool.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/duration.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/duration.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1567e6043f2ce569f261c6020c01a1609243eb81 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/duration.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a8874ee65f2fe851cc64d38ff6d3b9d12f398c8 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/empty_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/json_format.cpython-311.pyc 
b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/json_format.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08058523859c88119bc7fd19802fde3d1a357086 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/json_format.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed13a4cc3852d03e6c559ef1d857732ce2ab7e3f Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_builder.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_builder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8105fdc98e96f994303bf6b36a22c0aa0308f292 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_builder.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_json.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_json.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0dd58fdcb2444e05477dc41ece2531749e56ebf6 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/proto_json.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/reflection.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/reflection.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d999f7221bef33a0ba484895da979ebd32c03dd9 Binary files /dev/null and 
b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/reflection.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/runtime_version.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/runtime_version.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..279a293e879d8d9fed7b1ae43468d20016577372 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/runtime_version.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7cf8e20dba55f8dcbdd485069a13ee773568769 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service_reflection.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service_reflection.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7038db59964c2ed50b83e45d0c052685f112a2d Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/service_reflection.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f376f97744c75de4bad70ca4a47115a0f9eb9717 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/source_context_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-311.pyc 
b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae3b935dfa017341c4ab6c7ef8015b683eb070b8 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/struct_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..316edef50c2ee111a1e0c833b851b17ff5678dc6 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..08372c72fb36308c36a3e4b4498800e43b754374 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/timestamp_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5326b0314660fdcc05481219aabf372203062ea1 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/__pycache__/wrappers_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__init__.py b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3422cc7e7f2bf66b533b1d6f920248cef993c123 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e0c683be1ca8fbc9514f27f926112f32a8eea24 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/compiler/plugin_pb2.py b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/plugin_pb2.py new file mode 100644 index 0000000000000000000000000000000000000000..100ae6da790c47a779d33de2aafe69a38b54238c --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/compiler/plugin_pb2.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: google/protobuf/compiler/plugin.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'google/protobuf/compiler/plugin.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"c\n\x07Version\x12\x14\n\x05major\x18\x01 \x01(\x05R\x05major\x12\x14\n\x05minor\x18\x02 \x01(\x05R\x05minor\x12\x14\n\x05patch\x18\x03 \x01(\x05R\x05patch\x12\x16\n\x06suffix\x18\x04 \x01(\tR\x06suffix\"\xcf\x02\n\x14\x43odeGeneratorRequest\x12(\n\x10\x66ile_to_generate\x18\x01 \x03(\tR\x0e\x66ileToGenerate\x12\x1c\n\tparameter\x18\x02 \x01(\tR\tparameter\x12\x43\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\tprotoFile\x12\\\n\x17source_file_descriptors\x18\x11 \x03(\x0b\x32$.google.protobuf.FileDescriptorProtoR\x15sourceFileDescriptors\x12L\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.VersionR\x0f\x63ompilerVersion\"\x85\x04\n\x15\x43odeGeneratorResponse\x12\x14\n\x05\x65rror\x18\x01 \x01(\tR\x05\x65rror\x12-\n\x12supported_features\x18\x02 \x01(\x04R\x11supportedFeatures\x12\'\n\x0fminimum_edition\x18\x03 \x01(\x05R\x0eminimumEdition\x12\'\n\x0fmaximum_edition\x18\x04 \x01(\x05R\x0emaximumEdition\x12H\n\x04\x66ile\x18\x0f 
\x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.FileR\x04\x66ile\x1a\xb1\x01\n\x04\x46ile\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\'\n\x0finsertion_point\x18\x02 \x01(\tR\x0einsertionPoint\x12\x18\n\x07\x63ontent\x18\x0f \x01(\tR\x07\x63ontent\x12R\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfoR\x11generatedCodeInfo\"W\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x12\x1d\n\x19\x46\x45\x41TURE_SUPPORTS_EDITIONS\x10\x02\x42r\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb\xaa\x02\x18Google.Protobuf.Compiler') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb\252\002\030Google.Protobuf.Compiler' + _globals['_VERSION']._serialized_start=101 + _globals['_VERSION']._serialized_end=200 + _globals['_CODEGENERATORREQUEST']._serialized_start=203 + _globals['_CODEGENERATORREQUEST']._serialized_end=538 + _globals['_CODEGENERATORRESPONSE']._serialized_start=541 + _globals['_CODEGENERATORRESPONSE']._serialized_end=1058 + _globals['_CODEGENERATORRESPONSE_FILE']._serialized_start=792 + _globals['_CODEGENERATORRESPONSE_FILE']._serialized_end=969 + _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_start=971 + _globals['_CODEGENERATORRESPONSE_FEATURE']._serialized_end=1058 +# @@protoc_insertion_point(module_scope) diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__init__.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..e676e288161e00ae5c266d080ca5018336849a7d --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__init__.py @@ -0,0 +1,7 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e02fb9bfccb10bd2c4ac3dc69cfa5bf59fae1c0 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/_parameterized.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/_parameterized.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c17d4d704c5d446f0f21e207038d7c5e23da035 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/_parameterized.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e2809fc2cec6ebebb3c04fd518036029aabe2c33 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/api_implementation.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/builder.cpython-311.pyc 
b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/builder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..31b1a3932da2bd339aec71a537316d9a1bc8b827 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/builder.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/containers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/containers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8eb6963351e8056841c592cdd622e1b31d6e1eb Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/containers.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a9e8d2ccf3dedb82953c106fdd423c6aec97c1e2 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/decoder.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c8071be37219ff64d00b5567697ff40ea0233993 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/encoder.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-311.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..e603b5cdaa789031d91b4f2301de3c5a2dd04565 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bf5e34ee8ae5ef2f28fd0c082c72e34e323fc6b9 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/extension_dict.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/field_mask.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/field_mask.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1d8679682aef63605f80b46f343dd5dd56aa5cc Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/field_mask.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..994ff915e8cf2c33d379a4b28ceb46cbe1a67db6 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/message_listener.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_edition_defaults.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_edition_defaults.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..05a8914e69e9fbb28474eb29c703efabedeeb9c7 Binary files 
/dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_edition_defaults.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc2e64261a5af711f7822dd0bbcbde396ec7973e Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/python_message.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/testing_refleaks.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/testing_refleaks.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ffad9c8de2a4e3cc850f67f263fd8ce78150cb0e Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/testing_refleaks.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c107afe30a477de0ab10ffb133e4e558b5a2066f Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/type_checkers.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4d7c40c3eea4f05cce9e45ed6a532cca1056343c Binary files /dev/null and 
b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/well_known_types.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4204950d40f569339cede274c07bd36e3980b1b6 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/internal/__pycache__/wire_format.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/api_implementation.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/api_implementation.py new file mode 100644 index 0000000000000000000000000000000000000000..b40446b41480b80c6a8c90528ba81a2c441b4798 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/api_implementation.py @@ -0,0 +1,142 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + +"""Determine which implementation of the protobuf API is used in this process. +""" + +import importlib +import os +import sys +import warnings + +_GOOGLE3_PYTHON_UPB_DEFAULT = True + + +def _ApiVersionToImplementationType(api_version): + if api_version == 2: + return 'cpp' + if api_version == 1: + raise ValueError('api_version=1 is no longer supported.') + if api_version == 0: + return 'python' + return None + + +_implementation_type = None +try: + # pylint: disable=g-import-not-at-top + from google.protobuf.internal import _api_implementation + # The compile-time constants in the _api_implementation module can be used to + # switch to a certain implementation of the Python API at build time. 
+ _implementation_type = _ApiVersionToImplementationType( + _api_implementation.api_version) +except ImportError: + pass # Unspecified by compiler flags. + + +def _CanImport(mod_name): + try: + mod = importlib.import_module(mod_name) + # Work around a known issue in the classic bootstrap .par import hook. + if not mod: + raise ImportError(mod_name + ' import succeeded but was None') + return True + except ImportError: + return False + + +if _implementation_type is None: + if _CanImport('google._upb._message'): + _implementation_type = 'upb' + elif _CanImport('google.protobuf.pyext._message'): + _implementation_type = 'cpp' + else: + _implementation_type = 'python' + + +# This environment variable can be used to switch to a certain implementation +# of the Python API, overriding the compile-time constants in the +# _api_implementation module. Right now only 'python', 'cpp' and 'upb' are +# valid values. Any other value will raise error. +_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', + _implementation_type) + +if _implementation_type not in ('python', 'cpp', 'upb'): + raise ValueError('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION {0} is not ' + 'supported. Please set to \'python\', \'cpp\' or ' + '\'upb\'.'.format(_implementation_type)) + +if 'PyPy' in sys.version and _implementation_type == 'cpp': + warnings.warn('PyPy does not work yet with cpp protocol buffers. 
' + 'Falling back to the python implementation.') + _implementation_type = 'python' + +_c_module = None + +if _implementation_type == 'cpp': + try: + # pylint: disable=g-import-not-at-top + from google.protobuf.pyext import _message + sys.modules['google3.net.proto2.python.internal.cpp._message'] = _message + _c_module = _message + del _message + except ImportError: + # TODO: fail back to python + warnings.warn( + 'Selected implementation cpp is not available.') + pass + +if _implementation_type == 'upb': + try: + # pylint: disable=g-import-not-at-top + from google._upb import _message + _c_module = _message + del _message + except ImportError: + warnings.warn('Selected implementation upb is not available. ' + 'Falling back to the python implementation.') + _implementation_type = 'python' + pass + +# Detect if serialization should be deterministic by default +try: + # The presence of this module in a build allows the proto implementation to + # be upgraded merely via build deps. + # + # NOTE: Merely importing this automatically enables deterministic proto + # serialization for C++ code, but we still need to export it as a boolean so + # that we can do the same for `_implementation_type == 'python'`. + # + # NOTE2: It is possible for C++ code to enable deterministic serialization by + # default _without_ affecting Python code, if the C++ implementation is not in + # use by this module. That is intended behavior, so we don't actually expose + # this boolean outside of this module. + # + # pylint: disable=g-import-not-at-top,unused-import + from google.protobuf import enable_deterministic_proto_serialization + _python_deterministic_proto_serialization = True +except ImportError: + _python_deterministic_proto_serialization = False + + +# Usage of this function is discouraged. Clients shouldn't care which +# implementation of the API is in use. Note that there is no guarantee +# that differences between APIs will be maintained. 
+# Please don't use this function if possible. +def Type(): + return _implementation_type + + +# See comment on 'Type' above. +# TODO: Remove the API, it returns a constant. b/228102101 +def Version(): + return 2 + + +# For internal use only +def IsPythonDefaultSerializationDeterministic(): + return _python_deterministic_proto_serialization diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/builder.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..4c0f2873d809ff381b7d973fbc17db7de9506c4a --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/builder.py @@ -0,0 +1,117 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + +"""Builds descriptors, message classes and services for generated _pb2.py. + +This file is only called in python generated _pb2.py files. It builds +descriptors, message classes and services that users can directly use +in generated code. +""" + +__author__ = 'jieluo@google.com (Jie Luo)' + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf.internal import python_message +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +_sym_db = _symbol_database.Default() + + +def BuildMessageAndEnumDescriptors(file_des, module): + """Builds message and enum descriptors. 
+ + Args: + file_des: FileDescriptor of the .proto file + module: Generated _pb2 module + """ + + def BuildNestedDescriptors(msg_des, prefix): + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + module_name = prefix + name.upper() + module[module_name] = nested_msg + BuildNestedDescriptors(nested_msg, module_name + '_') + for enum_des in msg_des.enum_types: + module[prefix + enum_des.name.upper()] = enum_des + + for (name, msg_des) in file_des.message_types_by_name.items(): + module_name = '_' + name.upper() + module[module_name] = msg_des + BuildNestedDescriptors(msg_des, module_name + '_') + + +def BuildTopDescriptorsAndMessages(file_des, module_name, module): + """Builds top level descriptors and message classes. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + + def BuildMessage(msg_des): + create_dict = {} + for (name, nested_msg) in msg_des.nested_types_by_name.items(): + create_dict[name] = BuildMessage(nested_msg) + create_dict['DESCRIPTOR'] = msg_des + create_dict['__module__'] = module_name + message_class = _reflection.GeneratedProtocolMessageType( + msg_des.name, (_message.Message,), create_dict) + _sym_db.RegisterMessage(message_class) + return message_class + + # top level enums + for (name, enum_des) in file_des.enum_types_by_name.items(): + module['_' + name.upper()] = enum_des + module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) + for enum_value in enum_des.values: + module[enum_value.name] = enum_value.number + + # top level extensions + for (name, extension_des) in file_des.extensions_by_name.items(): + module[name.upper() + '_FIELD_NUMBER'] = extension_des.number + module[name] = extension_des + + # services + for (name, service) in file_des.services_by_name.items(): + module['_' + name.upper()] = service + + # Build messages. 
+ for (name, msg_des) in file_des.message_types_by_name.items(): + module[name] = BuildMessage(msg_des) + + +def AddHelpersToExtensions(file_des): + """no-op to keep old generated code work with new runtime. + + Args: + file_des: FileDescriptor of the .proto file + """ + # TODO: Remove this on-op + return + + +def BuildServices(file_des, module_name, module): + """Builds services classes and services stub class. + + Args: + file_des: FileDescriptor of the .proto file + module_name: str, the name of generated _pb2 module + module: Generated _pb2 module + """ + # pylint: disable=g-import-not-at-top + from google.protobuf import service_reflection + # pylint: enable=g-import-not-at-top + for (name, service) in file_des.services_by_name.items(): + module[name] = service_reflection.GeneratedServiceType( + name, (), + dict(DESCRIPTOR=service, __module__=module_name)) + stub_name = name + '_Stub' + module[stub_name] = service_reflection.GeneratedServiceStubType( + stub_name, (module[name],), + dict(DESCRIPTOR=service, __module__=module_name)) diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/decoder.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/decoder.py new file mode 100644 index 0000000000000000000000000000000000000000..dcde1d9420c9ab2db7a309322f914f123b045f1a --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/decoder.py @@ -0,0 +1,1036 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + +"""Code for decoding protocol buffer primitives. + +This code is very similar to encoder.py -- read the docs for that module first. 
+ +A "decoder" is a function with the signature: + Decode(buffer, pos, end, message, field_dict) +The arguments are: + buffer: The string containing the encoded message. + pos: The current position in the string. + end: The position in the string where the current message ends. May be + less than len(buffer) if we're reading a sub-message. + message: The message object into which we're parsing. + field_dict: message._fields (avoids a hashtable lookup). +The decoder reads the field and stores it into field_dict, returning the new +buffer position. A decoder for a repeated field may proactively decode all of +the elements of that field, if they appear consecutively. + +Note that decoders may throw any of the following: + IndexError: Indicates a truncated message. + struct.error: Unpacking of a fixed-width field failed. + message.DecodeError: Other errors. + +Decoders are expected to raise an exception if they are called with pos > end. +This allows callers to be lax about bounds checking: it's fineto read past +"end" as long as you are sure that someone else will notice and throw an +exception later on. + +Something up the call stack is expected to catch IndexError and struct.error +and convert them to message.DecodeError. + +Decoders are constructed using decoder constructors with the signature: + MakeDecoder(field_number, is_repeated, is_packed, key, new_default) +The arguments are: + field_number: The field number of the field we want to decode. + is_repeated: Is the field a repeated field? (bool) + is_packed: Is the field a packed field? (bool) + key: The key to use when looking up the field within field_dict. + (This is actually the FieldDescriptor but nothing in this + file should depend on that.) + new_default: A function which takes a message object as a parameter and + returns a new instance of the default value for this field. + (This is called for repeated fields and sub-messages, when an + instance does not already exist.) 
+ +As with encoders, we define a decoder constructor for every type of field. +Then, for every field of every message class we construct an actual decoder. +That decoder goes into a dict indexed by tag, so when we decode a message +we repeatedly read a tag, look up the corresponding decoder, and invoke it. +""" + +__author__ = 'kenton@google.com (Kenton Varda)' + +import math +import struct + +from google.protobuf import message +from google.protobuf.internal import containers +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format + + +# This is not for optimization, but rather to avoid conflicts with local +# variables named "message". +_DecodeError = message.DecodeError + + +def _VarintDecoder(mask, result_type): + """Return an encoder for a basic varint value (does not include tag). + + Decoded values will be bitwise-anded with the given mask before being + returned, e.g. to limit them to 32 bits. The returned decoder does not + take the usual "end" parameter -- the caller is expected to do bounds checking + after the fact (often the caller can defer such checking until later). The + decoder returns a (value, new_pos) pair. + """ + + def DecodeVarint(buffer, pos: int=None): + result = 0 + shift = 0 + while 1: + if pos is None: + # Read from BytesIO + try: + b = buffer.read(1)[0] + except IndexError as e: + if shift == 0: + # End of BytesIO. 
+ return None + else: + raise ValueError('Fail to read varint %s' % str(e)) + else: + b = buffer[pos] + pos += 1 + result |= ((b & 0x7f) << shift) + if not (b & 0x80): + result &= mask + result = result_type(result) + return result if pos is None else (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + + return DecodeVarint + + +def _SignedVarintDecoder(bits, result_type): + """Like _VarintDecoder() but decodes signed values.""" + + signbit = 1 << (bits - 1) + mask = (1 << bits) - 1 + + def DecodeVarint(buffer, pos): + result = 0 + shift = 0 + while 1: + b = buffer[pos] + result |= ((b & 0x7f) << shift) + pos += 1 + if not (b & 0x80): + result &= mask + result = (result ^ signbit) - signbit + result = result_type(result) + return (result, pos) + shift += 7 + if shift >= 64: + raise _DecodeError('Too many bytes when decoding varint.') + return DecodeVarint + +# All 32-bit and 64-bit values are represented as int. +_DecodeVarint = _VarintDecoder((1 << 64) - 1, int) +_DecodeSignedVarint = _SignedVarintDecoder(64, int) + +# Use these versions for values which must be limited to 32 bits. +_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int) +_DecodeSignedVarint32 = _SignedVarintDecoder(32, int) + + +def ReadTag(buffer, pos): + """Read a tag from the memoryview, and return a (tag_bytes, new_pos) tuple. + + We return the raw bytes of the tag rather than decoding them. The raw + bytes can then be used to look up the proper decoder. This effectively allows + us to trade some work that would be done in pure-python (decoding a varint) + for work that is done in C (searching for a byte string in a hash table). + In a low-level language it would be much cheaper to decode the varint and + use that, but not in Python. + + Args: + buffer: memoryview object of the encoded bytes + pos: int of the current position to start from + + Returns: + Tuple[bytes, int] of the tag data and new position. 
+ """ + start = pos + while buffer[pos] & 0x80: + pos += 1 + pos += 1 + + tag_bytes = buffer[start:pos].tobytes() + return tag_bytes, pos + + +# -------------------------------------------------------------------- + + +def _SimpleDecoder(wire_type, decode_value): + """Return a constructor for a decoder for fields of a particular type. + + Args: + wire_type: The field's wire type. + decode_value: A function which decodes an individual value, e.g. + _DecodeVarint() + """ + + def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + if is_packed: + local_DecodeVarint = _DecodeVarint + def DecodePackedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + (endpoint, pos) = local_DecodeVarint(buffer, pos) + endpoint += pos + if endpoint > end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + (element, pos) = decode_value(buffer, pos) + value.append(element) + if pos > endpoint: + del value[-1] # Discard corrupt value. + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_type) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = decode_value(buffer, pos) + value.append(element) + # Predict that the next tag is another copy of the same repeated + # field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. 
+ if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (new_value, pos) = decode_value(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not new_value: + field_dict.pop(key, None) + else: + field_dict[key] = new_value + return pos + return DecodeField + + return SpecificDecoder + + +def _ModifiedDecoder(wire_type, decode_value, modify_value): + """Like SimpleDecoder but additionally invokes modify_value on every value + before storing it. Usually modify_value is ZigZagDecode. + """ + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + def InnerDecode(buffer, pos): + (result, new_pos) = decode_value(buffer, pos) + return (modify_value(result), new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _StructPackDecoder(wire_type, format): + """Return a constructor for a decoder for a fixed-width field. + + Args: + wire_type: The field's wire type. + format: The format string to pass to struct.unpack(). + """ + + value_size = struct.calcsize(format) + local_unpack = struct.unpack + + # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but + # not enough to make a significant difference. + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + + def InnerDecode(buffer, pos): + new_pos = pos + value_size + result = local_unpack(format, buffer[pos:new_pos])[0] + return (result, new_pos) + return _SimpleDecoder(wire_type, InnerDecode) + + +def _FloatDecoder(): + """Returns a decoder for a float field. + + This code works around a bug in struct.unpack for non-finite 32-bit + floating-point values. 
+ """ + + local_unpack = struct.unpack + + def InnerDecode(buffer, pos): + """Decode serialized float to a float and new position. + + Args: + buffer: memoryview of the serialized bytes + pos: int, position in the memory view to start at. + + Returns: + Tuple[float, int] of the deserialized float value and new position + in the serialized data. + """ + # We expect a 32-bit value in little-endian byte order. Bit 1 is the sign + # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand. + new_pos = pos + 4 + float_bytes = buffer[pos:new_pos].tobytes() + + # If this value has all its exponent bits set, then it's non-finite. + # In Python 2.4, struct.unpack will convert it to a finite 64-bit value. + # To avoid that, we parse it specially. + if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'): + # If at least one significand bit is set... + if float_bytes[0:3] != b'\x00\x00\x80': + return (math.nan, new_pos) + # If sign bit is set... + if float_bytes[3:4] == b'\xFF': + return (-math.inf, new_pos) + return (math.inf, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. + result = local_unpack('= b'\xF0') + and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')): + return (math.nan, new_pos) + + # Note that we expect someone up-stack to catch struct.error and convert + # it to _DecodeError -- this way we don't have to set up exception- + # handling blocks every time we parse one value. 
+ result = local_unpack(' end: + raise _DecodeError('Truncated message.') + while pos < endpoint: + value_start_pos = pos + (element, pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + # pylint: enable=protected-access + if pos > endpoint: + if element in enum_type.values_by_number: + del value[-1] # Discard corrupt value. + else: + del message._unknown_fields[-1] + # pylint: enable=protected-access + raise _DecodeError('Packed element was truncated.') + return pos + return DecodePackedField + elif is_repeated: + tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (element, new_pos) = _DecodeSignedVarint32(buffer, pos) + # pylint: disable=protected-access + if element in enum_type.values_by_number: + value.append(element) + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (tag_bytes, buffer[pos:new_pos].tobytes())) + # pylint: enable=protected-access + # Predict that the next tag is another copy of the same repeated + # field. 
+ pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos >= end: + # Prediction failed. Return. + if new_pos > end: + raise _DecodeError('Truncated message.') + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + """Decode serialized repeated enum to its value and a new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + value_start_pos = pos + (enum_value, pos) = _DecodeSignedVarint32(buffer, pos) + if pos > end: + raise _DecodeError('Truncated message.') + if clear_if_default and not enum_value: + field_dict.pop(key, None) + return pos + # pylint: disable=protected-access + if enum_value in enum_type.values_by_number: + field_dict[key] = enum_value + else: + if not message._unknown_fields: + message._unknown_fields = [] + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_VARINT) + message._unknown_fields.append( + (tag_bytes, buffer[value_start_pos:pos].tobytes())) + # pylint: enable=protected-access + return pos + return DecodeField + + +# -------------------------------------------------------------------- + + +Int32Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32) + +Int64Decoder = _SimpleDecoder( + wire_format.WIRETYPE_VARINT, _DecodeSignedVarint) + +UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32) +UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint) + +SInt32Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode) +SInt64Decoder = _ModifiedDecoder( + wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode) + +# Note that Python conveniently 
guarantees that when using the '<' prefix on +# formats, they will also have the same size across all platforms (as opposed +# to without the prefix, where their sizes depend on the C compiler's basic +# type sizes). +Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, ' end: + raise _DecodeError('Truncated string.') + value.append(_ConvertToUnicode(buffer[pos:new_pos])) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos]) + return new_pos + return DecodeField + + +def BytesDecoder(field_number, is_repeated, is_packed, key, new_default, + clear_if_default=False): + """Returns a decoder for a bytes field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + value.append(buffer[pos:new_pos].tobytes()) + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. 
+ return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated string.') + if clear_if_default and not size: + field_dict.pop(key, None) + else: + field_dict[key] = buffer[pos:new_pos].tobytes() + return new_pos + return DecodeField + + +def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a group field.""" + + end_tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_END_GROUP) + end_tag_len = len(end_tag_bytes) + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_START_GROUP) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value.add()._InternalParse(buffer, pos, end) + # Read end tag. + new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read sub-message. + pos = value._InternalParse(buffer, pos, end) + # Read end tag. 
+ new_pos = pos+end_tag_len + if buffer[pos:new_pos] != end_tag_bytes or new_pos > end: + raise _DecodeError('Missing group end tag.') + return new_pos + return DecodeField + + +def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): + """Returns a decoder for a message field.""" + + local_DecodeVarint = _DecodeVarint + + assert not is_packed + if is_repeated: + tag_bytes = encoder.TagBytes(field_number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + def DecodeRepeatedField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value.add()._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + return DecodeRepeatedField + else: + def DecodeField(buffer, pos, end, message, field_dict): + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + if value._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. 
+ raise _DecodeError('Unexpected end-group tag.') + return new_pos + return DecodeField + + +# -------------------------------------------------------------------- + +MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP) + +def MessageSetItemDecoder(descriptor): + """Returns a decoder for a MessageSet item. + + The parameter is the message Descriptor. + + The message set message looks like this: + message MessageSet { + repeated group Item = 1 { + required int32 type_id = 2; + required string message = 3; + } + } + """ + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + local_ReadTag = ReadTag + local_DecodeVarint = _DecodeVarint + local_SkipField = SkipField + + def DecodeItem(buffer, pos, end, message, field_dict): + """Decode serialized message set to its value and new position. + + Args: + buffer: memoryview of the serialized bytes. + pos: int, position in the memory view to start at. + end: int, end position of serialized data + message: Message object to store unknown fields in + field_dict: Map[Descriptor, Any] to store decoded values in. + + Returns: + int, new position in serialized data. + """ + message_set_item_start = pos + type_id = -1 + message_start = -1 + message_end = -1 + + # Technically, type_id and message can appear in any order, so we need + # a little loop here. 
+ while 1: + (tag_bytes, pos) = local_ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = local_DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = local_DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + extension = message.Extensions._FindExtensionByNumber(type_id) + # pylint: disable=protected-access + if extension is not None: + value = field_dict.get(extension) + if value is None: + message_type = extension.message_type + if not hasattr(message_type, '_concrete_class'): + message_factory.GetMessageClass(message_type) + value = field_dict.setdefault( + extension, message_type._concrete_class()) + if value._InternalParse(buffer, message_start,message_end) != message_end: + # The only reason _InternalParse would return early is if it encountered + # an end-group tag. 
+ raise _DecodeError('Unexpected end-group tag.') + else: + if not message._unknown_fields: + message._unknown_fields = [] + message._unknown_fields.append( + (MESSAGE_SET_ITEM_TAG, buffer[message_set_item_start:pos].tobytes())) + # pylint: enable=protected-access + + return pos + + return DecodeItem + + +def UnknownMessageSetItemDecoder(): + """Returns a decoder for a Unknown MessageSet item.""" + + type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT) + message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED) + item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP) + + def DecodeUnknownItem(buffer): + pos = 0 + end = len(buffer) + message_start = -1 + message_end = -1 + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + if tag_bytes == type_id_tag_bytes: + (type_id, pos) = _DecodeVarint(buffer, pos) + elif tag_bytes == message_tag_bytes: + (size, message_start) = _DecodeVarint(buffer, pos) + pos = message_end = message_start + size + elif tag_bytes == item_end_tag_bytes: + break + else: + pos = SkipField(buffer, pos, end, tag_bytes) + if pos == -1: + raise _DecodeError('Missing group end tag.') + + if pos > end: + raise _DecodeError('Truncated message.') + + if type_id == -1: + raise _DecodeError('MessageSet item missing type_id.') + if message_start == -1: + raise _DecodeError('MessageSet item missing message.') + + return (type_id, buffer[message_start:message_end].tobytes()) + + return DecodeUnknownItem + +# -------------------------------------------------------------------- + +def MapDecoder(field_descriptor, new_default, is_message_map): + """Returns a decoder for a map field.""" + + key = field_descriptor + tag_bytes = encoder.TagBytes(field_descriptor.number, + wire_format.WIRETYPE_LENGTH_DELIMITED) + tag_len = len(tag_bytes) + local_DecodeVarint = _DecodeVarint + # Can't read _concrete_class yet; might not be initialized. 
+ message_type = field_descriptor.message_type + + def DecodeMap(buffer, pos, end, message, field_dict): + submsg = message_type._concrete_class() + value = field_dict.get(key) + if value is None: + value = field_dict.setdefault(key, new_default(message)) + while 1: + # Read length. + (size, pos) = local_DecodeVarint(buffer, pos) + new_pos = pos + size + if new_pos > end: + raise _DecodeError('Truncated message.') + # Read sub-message. + submsg.Clear() + if submsg._InternalParse(buffer, pos, new_pos) != new_pos: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. + raise _DecodeError('Unexpected end-group tag.') + + if is_message_map: + value[submsg.key].CopyFrom(submsg.value) + else: + value[submsg.key] = submsg.value + + # Predict that the next tag is another copy of the same repeated field. + pos = new_pos + tag_len + if buffer[new_pos:pos] != tag_bytes or new_pos == end: + # Prediction failed. Return. + return new_pos + + return DecodeMap + +# -------------------------------------------------------------------- +# Optimization is not as heavy here because calls to SkipField() are rare, +# except for handling end-group tags. + +def _SkipVarint(buffer, pos, end): + """Skip a varint value. Returns the new position.""" + # Previously ord(buffer[pos]) raised IndexError when pos is out of range. + # With this code, ord(b'') raises TypeError. Both are handled in + # python_message.py to generate a 'Truncated message' error. + while ord(buffer[pos:pos+1].tobytes()) & 0x80: + pos += 1 + pos += 1 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + +def _SkipFixed64(buffer, pos, end): + """Skip a fixed64 value. 
Returns the new position.""" + + pos += 8 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed64(buffer, pos): + """Decode a fixed64.""" + new_pos = pos + 8 + return (struct.unpack(' end: + raise _DecodeError('Truncated message.') + return pos + + +def _SkipGroup(buffer, pos, end): + """Skip sub-group. Returns the new position.""" + + while 1: + (tag_bytes, pos) = ReadTag(buffer, pos) + new_pos = SkipField(buffer, pos, end, tag_bytes) + if new_pos == -1: + return pos + pos = new_pos + + +def _DecodeUnknownFieldSet(buffer, pos, end_pos=None): + """Decode UnknownFieldSet. Returns the UnknownFieldSet and new position.""" + + unknown_field_set = containers.UnknownFieldSet() + while end_pos is None or pos < end_pos: + (tag_bytes, pos) = ReadTag(buffer, pos) + (tag, _) = _DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if wire_type == wire_format.WIRETYPE_END_GROUP: + break + (data, pos) = _DecodeUnknownField(buffer, pos, wire_type) + # pylint: disable=protected-access + unknown_field_set._add(field_number, wire_type, data) + + return (unknown_field_set, pos) + + +def _DecodeUnknownField(buffer, pos, wire_type): + """Decode a unknown field. 
Returns the UnknownField and new position.""" + + if wire_type == wire_format.WIRETYPE_VARINT: + (data, pos) = _DecodeVarint(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED64: + (data, pos) = _DecodeFixed64(buffer, pos) + elif wire_type == wire_format.WIRETYPE_FIXED32: + (data, pos) = _DecodeFixed32(buffer, pos) + elif wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED: + (size, pos) = _DecodeVarint(buffer, pos) + data = buffer[pos:pos+size].tobytes() + pos += size + elif wire_type == wire_format.WIRETYPE_START_GROUP: + (data, pos) = _DecodeUnknownFieldSet(buffer, pos) + elif wire_type == wire_format.WIRETYPE_END_GROUP: + return (0, -1) + else: + raise _DecodeError('Wrong wire type in tag.') + + return (data, pos) + + +def _EndGroup(buffer, pos, end): + """Skipping an END_GROUP tag returns -1 to tell the parent loop to break.""" + + return -1 + + +def _SkipFixed32(buffer, pos, end): + """Skip a fixed32 value. Returns the new position.""" + + pos += 4 + if pos > end: + raise _DecodeError('Truncated message.') + return pos + + +def _DecodeFixed32(buffer, pos): + """Decode a fixed32.""" + + new_pos = pos + 4 + return (struct.unpack(' str + ValueType = int + + def __init__(self, enum_type): + """Inits EnumTypeWrapper with an EnumDescriptor.""" + self._enum_type = enum_type + self.DESCRIPTOR = enum_type # pylint: disable=invalid-name + + def Name(self, number): # pylint: disable=invalid-name + """Returns a string containing the name of an enum value.""" + try: + return self._enum_type.values_by_number[number].name + except KeyError: + pass # fall out to break exception chaining + + if not isinstance(number, int): + raise TypeError( + 'Enum value for {} must be an int, but got {} {!r}.'.format( + self._enum_type.name, type(number), number)) + else: + # repr here to handle the odd case when you pass in a boolean. 
+ raise ValueError('Enum {} has no name defined for value {!r}'.format( + self._enum_type.name, number)) + + def Value(self, name): # pylint: disable=invalid-name + """Returns the value corresponding to the given enum name.""" + try: + return self._enum_type.values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise ValueError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def keys(self): + """Return a list of the string names in the enum. + + Returns: + A list of strs, in the order they were defined in the .proto file. + """ + + return [value_descriptor.name + for value_descriptor in self._enum_type.values] + + def values(self): + """Return a list of the integer values in the enum. + + Returns: + A list of ints, in the order they were defined in the .proto file. + """ + + return [value_descriptor.number + for value_descriptor in self._enum_type.values] + + def items(self): + """Return a list of the (name, value) pairs of the enum. + + Returns: + A list of (str, int) pairs, in the order they were defined + in the .proto file. 
+ """ + return [(value_descriptor.name, value_descriptor.number) + for value_descriptor in self._enum_type.values] + + def __getattr__(self, name): + """Returns the value corresponding to the given enum name.""" + try: + return super( + EnumTypeWrapper, + self).__getattribute__('_enum_type').values_by_name[name].number + except KeyError: + pass # fall out to break exception chaining + raise AttributeError('Enum {} has no value defined for name {!r}'.format( + self._enum_type.name, name)) + + def __or__(self, other): + """Returns the union type of self and other.""" + if sys.version_info >= (3, 10): + return type(self) | other + else: + raise NotImplementedError( + 'You may not use | on EnumTypes (or classes) below python 3.10' + ) diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/extension_dict.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/extension_dict.py new file mode 100644 index 0000000000000000000000000000000000000000..89e64d32d9351ca73a60f24972aa254cc54863ac --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/extension_dict.py @@ -0,0 +1,194 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + +"""Contains _ExtensionDict class to represent extensions. +""" + +from google.protobuf.internal import type_checkers +from google.protobuf.descriptor import FieldDescriptor + + +def _VerifyExtensionHandle(message, extension_handle): + """Verify that the given extension handle is valid.""" + + if not isinstance(extension_handle, FieldDescriptor): + raise KeyError('HasExtension() expects an extension handle, got: %s' % + extension_handle) + + if not extension_handle.is_extension: + raise KeyError('"%s" is not an extension.' 
% extension_handle.full_name) + + if not extension_handle.containing_type: + raise KeyError('"%s" is missing a containing_type.' + % extension_handle.full_name) + + if extension_handle.containing_type is not message.DESCRIPTOR: + raise KeyError('Extension "%s" extends message type "%s", but this ' + 'message is of type "%s".' % + (extension_handle.full_name, + extension_handle.containing_type.full_name, + message.DESCRIPTOR.full_name)) + + +# TODO: Unify error handling of "unknown extension" crap. +# TODO: Support iteritems()-style iteration over all +# extensions with the "has" bits turned on? +class _ExtensionDict(object): + + """Dict-like container for Extension fields on proto instances. + + Note that in all cases we expect extension handles to be + FieldDescriptors. + """ + + def __init__(self, extended_message): + """ + Args: + extended_message: Message instance for which we are the Extensions dict. + """ + self._extended_message = extended_message + + def __getitem__(self, extension_handle): + """Returns the current value of the given extension handle.""" + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + result = self._extended_message._fields.get(extension_handle) + if result is not None: + return result + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + result = extension_handle._default_constructor(self._extended_message) + elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + message_type = extension_handle.message_type + if not hasattr(message_type, '_concrete_class'): + # pylint: disable=g-import-not-at-top + from google.protobuf import message_factory + message_factory.GetMessageClass(message_type) + if not hasattr(extension_handle.message_type, '_concrete_class'): + from google.protobuf import message_factory + message_factory.GetMessageClass(extension_handle.message_type) + result = extension_handle.message_type._concrete_class() + try: + 
result._SetListener(self._extended_message._listener_for_children) + except ReferenceError: + pass + else: + # Singular scalar -- just return the default without inserting into the + # dict. + return extension_handle.default_value + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + result = self._extended_message._fields.setdefault( + extension_handle, result) + + return result + + def __eq__(self, other): + if not isinstance(other, self.__class__): + return False + + my_fields = self._extended_message.ListFields() + other_fields = other._extended_message.ListFields() + + # Get rid of non-extension fields. + my_fields = [field for field in my_fields if field.is_extension] + other_fields = [field for field in other_fields if field.is_extension] + + return my_fields == other_fields + + def __ne__(self, other): + return not self == other + + def __len__(self): + fields = self._extended_message.ListFields() + # Get rid of non-extension fields. + extension_fields = [field for field in fields if field[0].is_extension] + return len(extension_fields) + + def __hash__(self): + raise TypeError('unhashable object') + + # Note that this is only meaningful for non-repeated, scalar extension + # fields. Note also that we may have to call _Modified() when we do + # successfully set a field this way, to set any necessary "has" bits in the + # ancestors of the extended message. + def __setitem__(self, extension_handle, value): + """If extension_handle specifies a non-repeated, scalar extension + field, sets the value of that field. 
+ """ + + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or + extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): + raise TypeError( + 'Cannot assign to extension "%s" because it is a repeated or ' + 'composite type.' % extension_handle.full_name) + + # It's slightly wasteful to lookup the type checker each time, + # but we expect this to be a vanishingly uncommon case anyway. + type_checker = type_checkers.GetTypeChecker(extension_handle) + # pylint: disable=protected-access + self._extended_message._fields[extension_handle] = ( + type_checker.CheckValue(value)) + self._extended_message._Modified() + + def __delitem__(self, extension_handle): + self._extended_message.ClearExtension(extension_handle) + + def _FindExtensionByName(self, name): + """Tries to find a known extension with the specified name. + + Args: + name: Extension full name. + + Returns: + Extension field descriptor. + """ + descriptor = self._extended_message.DESCRIPTOR + extensions = descriptor.file.pool._extensions_by_name[descriptor] + return extensions.get(name, None) + + def _FindExtensionByNumber(self, number): + """Tries to find a known extension with the field number. + + Args: + number: Extension field number. + + Returns: + Extension field descriptor. 
+ """ + descriptor = self._extended_message.DESCRIPTOR + extensions = descriptor.file.pool._extensions_by_number[descriptor] + return extensions.get(number, None) + + def __iter__(self): + # Return a generator over the populated extension fields + return (f[0] for f in self._extended_message.ListFields() + if f[0].is_extension) + + def __contains__(self, extension_handle): + _VerifyExtensionHandle(self._extended_message, extension_handle) + + if extension_handle not in self._extended_message._fields: + return False + + if extension_handle.label == FieldDescriptor.LABEL_REPEATED: + return bool(self._extended_message._fields.get(extension_handle)) + + if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + value = self._extended_message._fields.get(extension_handle) + # pylint: disable=protected-access + return value is not None and value._is_present_in_parent + + return True diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/field_mask.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/field_mask.py new file mode 100644 index 0000000000000000000000000000000000000000..ae34f08a8adb9fddd0e7b6011e4f359e31ac3796 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/field_mask.py @@ -0,0 +1,310 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

"""Contains FieldMask class."""

from google.protobuf.descriptor import FieldDescriptor


class FieldMask(object):
  """Class for FieldMask message type.

  Mixed into the generated google.protobuf.FieldMask message class; methods
  here therefore operate on `self.paths` (the repeated string field) and may
  call generated-message methods such as Clear().
  """

  __slots__ = ()

  def ToJsonString(self):
    """Converts FieldMask to string according to proto3 JSON spec.

    Each snake_case path is converted to lowerCamelCase and the paths are
    joined with commas (no whitespace).
    """
    camelcase_paths = []
    for path in self.paths:
      camelcase_paths.append(_SnakeCaseToCamelCase(path))
    return ','.join(camelcase_paths)

  def FromJsonString(self, value):
    """Converts string to FieldMask according to proto3 JSON spec.

    Clears any existing paths first; an empty string yields an empty mask.

    Raises:
      ValueError: If value is not a str, or a path contains an underscore
        (raised by _CamelCaseToSnakeCase).
    """
    if not isinstance(value, str):
      raise ValueError('FieldMask JSON value not a string: {!r}'.format(value))
    self.Clear()
    if value:
      for path in value.split(','):
        self.paths.append(_CamelCaseToSnakeCase(path))

  def IsValidForDescriptor(self, message_descriptor):
    """Checks whether the FieldMask is valid for Message Descriptor."""
    # Valid iff every path resolves through singular message fields to an
    # existing field (see _IsValidPath).
    for path in self.paths:
      if not _IsValidPath(message_descriptor, path):
        return False
    return True

  def AllFieldsFromDescriptor(self, message_descriptor):
    """Gets all direct fields of Message Descriptor to FieldMask.

    Replaces the current paths with one top-level path per field; does not
    recurse into sub-messages.
    """
    self.Clear()
    for field in message_descriptor.fields:
      self.paths.append(field.name)

  def CanonicalFormFromMask(self, mask):
    """Converts a FieldMask to the canonical form.

    Removes paths that are covered by another path. For example,
    "foo.bar" is covered by "foo" and will be removed if "foo"
    is also in the FieldMask. Then sorts all paths in alphabetical order.

    Args:
      mask: The original FieldMask to be converted.
    """
    tree = _FieldMaskTree(mask)
    tree.ToFieldMask(self)

  def Union(self, mask1, mask2):
    """Merges mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    tree.MergeFromFieldMask(mask2)
    tree.ToFieldMask(self)

  def Intersect(self, mask1, mask2):
    """Intersects mask1 and mask2 into this FieldMask."""
    _CheckFieldMaskMessage(mask1)
    _CheckFieldMaskMessage(mask2)
    tree = _FieldMaskTree(mask1)
    intersection = _FieldMaskTree()
    for path in mask2.paths:
      tree.IntersectPath(path, intersection)
    intersection.ToFieldMask(self)

  def MergeMessage(
      self, source, destination,
      replace_message_field=False, replace_repeated_field=False):
    """Merges fields specified in FieldMask from source to destination.

    Args:
      source: Source message.
      destination: The destination message to be merged into.
      replace_message_field: Replace message field if True. Merge message
        field if False.
      replace_repeated_field: Replace repeated field if True. Append
        elements of repeated field if False.
+ """ + tree = _FieldMaskTree(self) + tree.MergeMessage( + source, destination, replace_message_field, replace_repeated_field) + + +def _IsValidPath(message_descriptor, path): + """Checks whether the path is valid for Message Descriptor.""" + parts = path.split('.') + last = parts.pop() + for name in parts: + field = message_descriptor.fields_by_name.get(name) + if (field is None or + field.label == FieldDescriptor.LABEL_REPEATED or + field.type != FieldDescriptor.TYPE_MESSAGE): + return False + message_descriptor = field.message_type + return last in message_descriptor.fields_by_name + + +def _CheckFieldMaskMessage(message): + """Raises ValueError if message is not a FieldMask.""" + message_descriptor = message.DESCRIPTOR + if (message_descriptor.name != 'FieldMask' or + message_descriptor.file.name != 'google/protobuf/field_mask.proto'): + raise ValueError('Message {0} is not a FieldMask.'.format( + message_descriptor.full_name)) + + +def _SnakeCaseToCamelCase(path_name): + """Converts a path name from snake_case to camelCase.""" + result = [] + after_underscore = False + for c in path_name: + if c.isupper(): + raise ValueError( + 'Fail to print FieldMask to Json string: Path name ' + '{0} must not contain uppercase letters.'.format(path_name)) + if after_underscore: + if c.islower(): + result.append(c.upper()) + after_underscore = False + else: + raise ValueError( + 'Fail to print FieldMask to Json string: The ' + 'character after a "_" must be a lowercase letter ' + 'in path name {0}.'.format(path_name)) + elif c == '_': + after_underscore = True + else: + result += c + + if after_underscore: + raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' + 'in path name {0}.'.format(path_name)) + return ''.join(result) + + +def _CamelCaseToSnakeCase(path_name): + """Converts a field name from camelCase to snake_case.""" + result = [] + for c in path_name: + if c == '_': + raise ValueError('Fail to parse FieldMask: Path name ' + '{0} must not contain 
"_"s.'.format(path_name)) + if c.isupper(): + result += '_' + result += c.lower() + else: + result += c + return ''.join(result) + + +class _FieldMaskTree(object): + """Represents a FieldMask in a tree structure. + + For example, given a FieldMask "foo.bar,foo.baz,bar.baz", + the FieldMaskTree will be: + [_root] -+- foo -+- bar + | | + | +- baz + | + +- bar --- baz + In the tree, each leaf node represents a field path. + """ + + __slots__ = ('_root',) + + def __init__(self, field_mask=None): + """Initializes the tree by FieldMask.""" + self._root = {} + if field_mask: + self.MergeFromFieldMask(field_mask) + + def MergeFromFieldMask(self, field_mask): + """Merges a FieldMask to the tree.""" + for path in field_mask.paths: + self.AddPath(path) + + def AddPath(self, path): + """Adds a field path into the tree. + + If the field path to add is a sub-path of an existing field path + in the tree (i.e., a leaf node), it means the tree already matches + the given path so nothing will be added to the tree. If the path + matches an existing non-leaf node in the tree, that non-leaf node + will be turned into a leaf node with all its children removed because + the path matches all the node's children. Otherwise, a new path will + be added. + + Args: + path: The field path to add. + """ + node = self._root + for name in path.split('.'): + if name not in node: + node[name] = {} + elif not node[name]: + # Pre-existing empty node implies we already have this entire tree. + return + node = node[name] + # Remove any sub-trees we might have had. + node.clear() + + def ToFieldMask(self, field_mask): + """Converts the tree to a FieldMask.""" + field_mask.Clear() + _AddFieldPaths(self._root, '', field_mask) + + def IntersectPath(self, path, intersection): + """Calculates the intersection part of a field path with this tree. + + Args: + path: The field path to calculates. + intersection: The out tree to record the intersection part. 
+ """ + node = self._root + for name in path.split('.'): + if name not in node: + return + elif not node[name]: + intersection.AddPath(path) + return + node = node[name] + intersection.AddLeafNodes(path, node) + + def AddLeafNodes(self, prefix, node): + """Adds leaf nodes begin with prefix to this tree.""" + if not node: + self.AddPath(prefix) + for name in node: + child_path = prefix + '.' + name + self.AddLeafNodes(child_path, node[name]) + + def MergeMessage( + self, source, destination, + replace_message, replace_repeated): + """Merge all fields specified by this tree from source to destination.""" + _MergeMessage( + self._root, source, destination, replace_message, replace_repeated) + + +def _StrConvert(value): + """Converts value to str if it is not.""" + # This file is imported by c extension and some methods like ClearField + # requires string for the field name. py2/py3 has different text + # type and may use unicode. + if not isinstance(value, str): + return value.encode('utf-8') + return value + + +def _MergeMessage( + node, source, destination, replace_message, replace_repeated): + """Merge all fields specified by a sub-tree from source to destination.""" + source_descriptor = source.DESCRIPTOR + for name in node: + child = node[name] + field = source_descriptor.fields_by_name[name] + if field is None: + raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( + name, source_descriptor.full_name)) + if child: + # Sub-paths are only allowed for singular message fields. 
+ if (field.label == FieldDescriptor.LABEL_REPEATED or + field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): + raise ValueError('Error: Field {0} in message {1} is not a singular ' + 'message field and cannot have sub-fields.'.format( + name, source_descriptor.full_name)) + if source.HasField(name): + _MergeMessage( + child, getattr(source, name), getattr(destination, name), + replace_message, replace_repeated) + continue + if field.label == FieldDescriptor.LABEL_REPEATED: + if replace_repeated: + destination.ClearField(_StrConvert(name)) + repeated_source = getattr(source, name) + repeated_destination = getattr(destination, name) + repeated_destination.MergeFrom(repeated_source) + else: + if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: + if replace_message: + destination.ClearField(_StrConvert(name)) + if source.HasField(name): + getattr(destination, name).MergeFrom(getattr(source, name)) + else: + setattr(destination, name, getattr(source, name)) + + +def _AddFieldPaths(node, prefix, field_mask): + """Adds the field paths descended from node to field_mask.""" + if not node and prefix: + field_mask.paths.append(prefix) + return + for name in sorted(node): + if prefix: + child_path = prefix + '.' + name + else: + child_path = name + _AddFieldPaths(node[name], child_path, field_mask) diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/message_listener.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/message_listener.py new file mode 100644 index 0000000000000000000000000000000000000000..ff1c127d549dcd8973fdbe67fda09b9ce3460abe --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/message_listener.py @@ -0,0 +1,55 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. 
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

"""Defines a listener interface for observing certain
state transitions on Message objects.

Also defines a null implementation of this interface.
"""

__author__ = 'robinson@google.com (Will Robinson)'


class MessageListener(object):

  """Listens for modifications made to a message. Meant to be registered via
  Message._SetListener().

  Attributes:
    dirty: If True, then calling Modified() would be a no-op. This can be
      used to avoid these calls entirely in the common case.
  """

  def Modified(self):
    """Called every time the message is modified in such a way that the parent
    message may need to be updated. This currently means either:
    (a) The message was modified for the first time, so the parent message
        should henceforth mark the message as present.
    (b) The message's cached byte size became dirty -- i.e. the message was
        modified for the first time after a previous call to ByteSize().
        Therefore the parent should also mark its byte size as dirty.
    Note that (a) implies (b), since new objects start out with a client cached
    size (zero). However, we document (a) explicitly because it is important.

    Modified() will *only* be called in response to one of these two events --
    not every time the sub-message is modified.

    Note that if the listener's |dirty| attribute is true, then calling
    Modified at the moment would be a no-op, so it can be skipped. Performance-
    sensitive callers should check this attribute directly before calling since
    it will be true most of the time.
    """

    # Abstract method: concrete listeners (e.g. in python_message.py) must
    # override this.
    raise NotImplementedError


class NullMessageListener(object):

  """No-op MessageListener implementation."""

  def Modified(self):
    pass
diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/python_message.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/python_message.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f901688094453dc6081fcd08126793e8688557f
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/python_message.py
@@ -0,0 +1,1583 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd

# This code is meant to work on Python 2.4 and above only.
#
# TODO: Helpers for verbose, common checks like seeing if a
# descriptor's cpp_type is CPPTYPE_MESSAGE.

"""Contains a metaclass and helper functions used to create
protocol message classes from Descriptor objects at runtime.

Recall that a metaclass is the "type" of a class.
(A class is to a metaclass what an instance is to a class.)

In this case, we use the GeneratedProtocolMessageType metaclass
to inject all the useful functionality into the classes
output by the protocol compiler at compile-time.

The upshot of all this is that the real implementation
details for ALL pure-Python protocol buffers are *here in
this file*.
"""

__author__ = 'robinson@google.com (Will Robinson)'

import datetime
from io import BytesIO
import struct
import sys
import warnings
import weakref

from google.protobuf import descriptor as descriptor_mod
from google.protobuf import message as message_mod
from google.protobuf import text_format
# We use "as" to avoid name collisions with variables.
from google.protobuf.internal import api_implementation
from google.protobuf.internal import containers
from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import enum_type_wrapper
from google.protobuf.internal import extension_dict
from google.protobuf.internal import message_listener as message_listener_mod
from google.protobuf.internal import type_checkers
from google.protobuf.internal import well_known_types
from google.protobuf.internal import wire_format

# Short aliases used throughout this module.
_FieldDescriptor = descriptor_mod.FieldDescriptor
_AnyFullTypeName = 'google.protobuf.Any'
_StructFullTypeName = 'google.protobuf.Struct'
_ListValueFullTypeName = 'google.protobuf.ListValue'
_ExtensionDict = extension_dict._ExtensionDict

class GeneratedProtocolMessageType(type):

  """Metaclass for protocol message classes created at runtime from Descriptors.

  We add implementations for all methods described in the Message class. We
  also create properties to allow getting/setting all fields in the protocol
  message. Finally, we create slots to prevent users from accidentally
  "setting" nonexistent fields in the protocol message, which then wouldn't get
  serialized / deserialized properly.

  The protocol compiler currently uses this metaclass to create protocol
  message classes at runtime. Clients can also manually create their own
  classes at runtime, as in this example:

  mydescriptor = Descriptor(.....)
  factory = symbol_database.Default()
  factory.pool.AddDescriptor(mydescriptor)
  MyProtoClass = message_factory.GetMessageClass(mydescriptor)
  myproto_instance = MyProtoClass()
  myproto.foo_field = 23
  ...
  """

  # Must be consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __new__(cls, name, bases, dictionary):
    """Custom allocation for runtime-generated class types.

    We override __new__ because this is apparently the only place
    where we can meaningfully set __slots__ on the class we're creating(?).
    (The interplay between metaclasses and slots is not very well-documented).

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.

    Returns:
      Newly-allocated class.

    Raises:
      RuntimeError: Generated code only work with python cpp extension.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    if isinstance(descriptor, str):
      # C++-backed generated code stores a serialized descriptor string here;
      # reaching this in pure Python means a runtime mismatch.
      raise RuntimeError('The generated code only work with python cpp '
                         'extension, but it is using pure python runtime.')

    # If a concrete class already exists for this descriptor, don't try to
    # create another.  Doing so will break any messages that already exist with
    # the existing class.
    #
    # The C++ implementation appears to have its own internal `PyMessageFactory`
    # to achieve similar results.
    #
    # This most commonly happens in `text_format.py` when using descriptors from
    # a custom pool; it calls message_factory.GetMessageClass() on a
    # descriptor which already has an existing concrete class.
    new_class = getattr(descriptor, '_concrete_class', None)
    if new_class:
      return new_class

    if descriptor.full_name in well_known_types.WKTBASES:
      # Mix in the well-known-type base (e.g. Timestamp/Duration helpers).
      bases += (well_known_types.WKTBASES[descriptor.full_name],)
    _AddClassAttributesForNestedExtensions(descriptor, dictionary)
    _AddSlots(descriptor, dictionary)

    superclass = super(GeneratedProtocolMessageType, cls)
    new_class = superclass.__new__(cls, name, bases, dictionary)
    return new_class

  def __init__(cls, name, bases, dictionary):
    """Here we perform the majority of our work on the class.
    We add enum getters, an __init__ method, implementations
    of all Message methods, and properties for all fields
    in the protocol type.

    Args:
      name: Name of the class (ignored, but required by the
        metaclass protocol).
      bases: Base classes of the class we're constructing.
        (Should be message.Message).  We ignore this field, but
        it's required by the metaclass protocol
      dictionary: The class dictionary of the class we're
        constructing.  dictionary[_DESCRIPTOR_KEY] must contain
        a Descriptor object describing this protocol message
        type.
    """
    descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]

    # If this is an _existing_ class looked up via `_concrete_class` in the
    # __new__ method above, then we don't need to re-initialize anything.
    existing_class = getattr(descriptor, '_concrete_class', None)
    if existing_class:
      assert existing_class is cls, (
          'Duplicate `GeneratedProtocolMessageType` created for descriptor %r'
          % (descriptor.full_name))
      return

    cls._message_set_decoders_by_tag = {}
    cls._fields_by_tag = {}
    if (descriptor.has_options and
        descriptor.GetOptions().message_set_wire_format):
      cls._message_set_decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
          decoder.MessageSetItemDecoder(descriptor),
          None,
      )

    # Attach stuff to each FieldDescriptor for quick lookup later on.
    for field in descriptor.fields:
      _AttachFieldHelpers(cls, field)

    if descriptor.is_extendable and hasattr(descriptor.file, 'pool'):
      extensions = descriptor.file.pool.FindAllExtensions(descriptor)
      for ext in extensions:
        _AttachFieldHelpers(cls, ext)

    descriptor._concrete_class = cls  # pylint: disable=protected-access
    _AddEnumValues(descriptor, cls)
    _AddInitMethod(descriptor, cls)
    _AddPropertiesForFields(descriptor, cls)
    _AddPropertiesForExtensions(descriptor, cls)
    _AddStaticMethods(cls)
    _AddMessageMethods(descriptor, cls)
    _AddPrivateHelperMethods(descriptor, cls)

    superclass = super(GeneratedProtocolMessageType, cls)
    superclass.__init__(name, bases, dictionary)


# Stateless helpers for GeneratedProtocolMessageType below.
# Outside clients should not access these directly.
#
# I opted not to make any of these methods on the metaclass, to make it more
# clear that I'm not really using any state there and to keep clients from
# thinking that they have direct access to these construction helpers.


def _PropertyName(proto_field_name):
  """Returns the name of the public property attribute which
  clients can use to get and (in some cases) set the value
  of a protocol message field.

  Args:
    proto_field_name: The protocol message field name, exactly
      as it appears (or would appear) in a .proto file.
  """
  # TODO: Escape Python keywords (e.g., yield), and test this support.
  # nnorwitz makes my day by writing:
  # """
  # FYI.  See the keyword module in the stdlib. This could be as simple as:
  #
  # if keyword.iskeyword(proto_field_name):
  #   return proto_field_name + "_"
  # return proto_field_name
  # """
  # Kenton says:  The above is a BAD IDEA.  People rely on being able to use
  # getattr() and setattr() to reflectively manipulate field values.  If we
  # rename the properties, then every such user has to also make sure to apply
  # the same transformation. 
Note that currently if you name a field "yield",
  # you can still access it just fine using getattr/setattr -- it's not even
  # that cumbersome to do so.
  # TODO: Remove this method entirely if/when everyone agrees with my
  # position.
  return proto_field_name


def _AddSlots(message_descriptor, dictionary):
  """Adds a __slots__ entry to dictionary, containing the names of all valid
  attributes for this message type.

  Args:
    message_descriptor: A Descriptor instance describing this message type.
    dictionary: Class dictionary to which we'll add a '__slots__' entry.
  """
  dictionary['__slots__'] = ['_cached_byte_size',
                             '_cached_byte_size_dirty',
                             '_fields',
                             '_unknown_fields',
                             '_is_present_in_parent',
                             '_listener',
                             '_listener_for_children',
                             '__weakref__',
                             '_oneofs']


def _IsMessageSetExtension(field):
  # True iff this extension field uses the legacy MessageSet wire format
  # (singular message extension on a message_set_wire_format container).
  return (field.is_extension and
          field.containing_type.has_options and
          field.containing_type.GetOptions().message_set_wire_format and
          field.type == _FieldDescriptor.TYPE_MESSAGE and
          field.label == _FieldDescriptor.LABEL_OPTIONAL)


def _IsMapField(field):
  # Map fields are represented as repeated auto-generated map-entry messages.
  return (field.type == _FieldDescriptor.TYPE_MESSAGE and
          field.message_type._is_map_entry)


def _IsMessageMapField(field):
  # True when the map's value type is itself a message (vs. a scalar).
  value_type = field.message_type.fields_by_name['value']
  return value_type.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE

def _AttachFieldHelpers(cls, field_descriptor):
  """Registers per-field helpers on cls and the field descriptor itself:
  a default-value constructor and the tag-bytes -> (descriptor, is_packed)
  entries used during decoding."""
  is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
  field_descriptor._default_constructor = _DefaultValueConstructorForField(
      field_descriptor
  )

  def AddFieldByTag(wiretype, is_packed):
    tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
    cls._fields_by_tag[tag_bytes] = (field_descriptor, is_packed)

  AddFieldByTag(
      type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type], False
  )

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # To support wire compatibility of adding packed = true, add a decoder for
    # packed values regardless of the field's options.
    AddFieldByTag(wire_format.WIRETYPE_LENGTH_DELIMITED, True)


def _MaybeAddEncoder(cls, field_descriptor):
  """Lazily attaches _encoder/_sizer to the field descriptor (idempotent)."""
  if hasattr(field_descriptor, '_encoder'):
    return
  is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
  is_map_entry = _IsMapField(field_descriptor)
  is_packed = field_descriptor.is_packed

  if is_map_entry:
    field_encoder = encoder.MapEncoder(field_descriptor)
    sizer = encoder.MapSizer(field_descriptor,
                             _IsMessageMapField(field_descriptor))
  elif _IsMessageSetExtension(field_descriptor):
    field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
    sizer = encoder.MessageSetItemSizer(field_descriptor.number)
  else:
    field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)
    sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
        field_descriptor.number, is_repeated, is_packed)

  field_descriptor._sizer = sizer
  field_descriptor._encoder = field_encoder


def _MaybeAddDecoder(cls, field_descriptor):
  """Lazily attaches a {is_packed: decoder} mapping to the field descriptor
  (idempotent)."""
  if hasattr(field_descriptor, '_decoders'):
    return

  is_repeated = field_descriptor.label == _FieldDescriptor.LABEL_REPEATED
  is_map_entry = _IsMapField(field_descriptor)
  helper_decoders = {}

  def AddDecoder(is_packed):
    decode_type = field_descriptor.type
    if (decode_type == _FieldDescriptor.TYPE_ENUM and
        not field_descriptor.enum_type.is_closed):
      # Open enums decode unknown values as plain int32.
      decode_type = _FieldDescriptor.TYPE_INT32

    oneof_descriptor = None
    if field_descriptor.containing_oneof is not None:
      oneof_descriptor = field_descriptor

    if is_map_entry:
      is_message_map = _IsMessageMapField(field_descriptor)

      field_decoder = decoder.MapDecoder(
          field_descriptor, _GetInitializeDefaultForMap(field_descriptor),
          is_message_map)
    elif decode_type == _FieldDescriptor.TYPE_STRING:
      field_decoder = decoder.StringDecoder(
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)
    elif field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          field_descriptor, field_descriptor._default_constructor)
    else:
      field_decoder = type_checkers.TYPE_TO_DECODER[decode_type](
          field_descriptor.number, is_repeated, is_packed,
          # pylint: disable=protected-access
          field_descriptor, field_descriptor._default_constructor,
          not field_descriptor.has_presence)

    helper_decoders[is_packed] = field_decoder

  AddDecoder(False)

  if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
    # To support wire compatibility of adding packed = true, add a decoder for
    # packed values regardless of the field's options.
    AddDecoder(True)

  field_descriptor._decoders = helper_decoders


def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
  # Expose each nested extension's FieldDescriptor as a class attribute.
  extensions = descriptor.extensions_by_name
  for extension_name, extension_field in extensions.items():
    assert extension_name not in dictionary
    dictionary[extension_name] = extension_field


def _AddEnumValues(descriptor, cls):
  """Sets class-level attributes for all enum fields defined in this message.

  Also exporting a class-level object that can name enum values.

  Args:
    descriptor: Descriptor object for this message type.
    cls: Class we're constructing for this message type.
  """
  for enum_type in descriptor.enum_types:
    setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
    for enum_value in enum_type.values:
      setattr(cls, enum_value.name, enum_value.number)


def _GetInitializeDefaultForMap(field):
  """Returns a function(message) that builds the default (empty) container
  for a map field: MessageMap for message values, ScalarMap otherwise.

  Raises:
    ValueError: If field is not repeated (map entries must be).
  """
  if field.label != _FieldDescriptor.LABEL_REPEATED:
    raise ValueError('map_entry set on non-repeated field %s' % (
        field.name))
  fields_by_name = field.message_type.fields_by_name
  key_checker = type_checkers.GetTypeChecker(fields_by_name['key'])

  value_field = fields_by_name['value']
  if _IsMessageMapField(field):
    def MakeMessageMapDefault(message):
      return containers.MessageMap(
          message._listener_for_children, value_field.message_type, key_checker,
          field.message_type)
    return MakeMessageMapDefault
  else:
    value_checker = type_checkers.GetTypeChecker(value_field)
    def MakePrimitiveMapDefault(message):
      return containers.ScalarMap(
          message._listener_for_children, key_checker, value_checker,
          field.message_type)
    return MakePrimitiveMapDefault

def _DefaultValueConstructorForField(field):
  """Returns a function which returns a default value for a field.

  Args:
    field: FieldDescriptor object for this field.

  The returned function has one argument:
    message: Message instance containing this field, or a weakref proxy
      of same.

  That function in turn returns a default value for this field.  The default
  value may refer back to |message| via a weak reference.
  """

  if _IsMapField(field):
    return _GetInitializeDefaultForMap(field)

  if field.label == _FieldDescriptor.LABEL_REPEATED:
    if field.has_default_value and field.default_value != []:
      raise ValueError('Repeated field default value not empty list: %s' % (
          field.default_value))
    if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
      # We can't look at _concrete_class yet since it might not have
      # been set.  (Depends on order in which we initialize the classes).
      message_type = field.message_type
      def MakeRepeatedMessageDefault(message):
        return containers.RepeatedCompositeFieldContainer(
            message._listener_for_children, field.message_type)
      return MakeRepeatedMessageDefault
    else:
      type_checker = type_checkers.GetTypeChecker(field)
      def MakeRepeatedScalarDefault(message):
        return containers.RepeatedScalarFieldContainer(
            message._listener_for_children, type_checker)
      return MakeRepeatedScalarDefault

  if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
    message_type = field.message_type
    def MakeSubMessageDefault(message):
      # _concrete_class may not yet be initialized.
      if not hasattr(message_type, '_concrete_class'):
        from google.protobuf import message_factory
        message_factory.GetMessageClass(message_type)
      result = message_type._concrete_class()
      # Oneof members get a listener that also updates the oneof state.
      result._SetListener(
          _OneofListener(message, field)
          if field.containing_oneof is not None
          else message._listener_for_children)
      return result
    return MakeSubMessageDefault

  def MakeScalarDefault(message):
    # TODO: This may be broken since there may not be
    # default_value.  Combine with has_default_value somehow.
    return field.default_value
  return MakeScalarDefault


def _ReraiseTypeErrorWithFieldName(message_name, field_name):
  """Re-raise the currently-handled TypeError with the field name added.

  Must be called from inside an `except TypeError` handler; only plain
  single-argument TypeErrors have their message amended.
  """
  exc = sys.exc_info()[1]
  if len(exc.args) == 1 and type(exc) is TypeError:
    # simple TypeError; add field name to exception message
    exc = TypeError('%s for field %s.%s' % (str(exc), message_name, field_name))

  # re-raise possibly-amended exception with original traceback:
  raise exc.with_traceback(sys.exc_info()[2])


def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""

  def _GetIntegerEnumValue(enum_type, value):
    """Convert a string or integer enum value to an integer.

    If the value is a string, it is converted to the enum value in
    enum_type with the same name. 
If the value is not a string, it's + returned as-is. (No conversion or bounds-checking is done.) + """ + if isinstance(value, str): + try: + return enum_type.values_by_name[value].number + except KeyError: + raise ValueError('Enum type %s: unknown label "%s"' % ( + enum_type.full_name, value)) + return value + + def init(self, **kwargs): + self._cached_byte_size = 0 + self._cached_byte_size_dirty = len(kwargs) > 0 + self._fields = {} + # Contains a mapping from oneof field descriptors to the descriptor + # of the currently set field in that oneof field. + self._oneofs = {} + + # _unknown_fields is () when empty for efficiency, and will be turned into + # a list if fields are added. + self._unknown_fields = () + self._is_present_in_parent = False + self._listener = message_listener_mod.NullMessageListener() + self._listener_for_children = _Listener(self) + for field_name, field_value in kwargs.items(): + field = _GetFieldByName(message_descriptor, field_name) + if field is None: + raise TypeError('%s() got an unexpected keyword argument "%s"' % + (message_descriptor.name, field_name)) + if field_value is None: + # field=None is the same as no field at all. 
+ continue + if field.label == _FieldDescriptor.LABEL_REPEATED: + field_copy = field._default_constructor(self) + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite + if _IsMapField(field): + if _IsMessageMapField(field): + for key in field_value: + field_copy[key].MergeFrom(field_value[key]) + else: + field_copy.update(field_value) + else: + for val in field_value: + if isinstance(val, dict): + field_copy.add(**val) + else: + field_copy.add().MergeFrom(val) + else: # Scalar + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = [_GetIntegerEnumValue(field.enum_type, val) + for val in field_value] + field_copy.extend(field_value) + self._fields[field] = field_copy + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + field_copy = field._default_constructor(self) + new_val = None + if isinstance(field_value, message_mod.Message): + new_val = field_value + elif isinstance(field_value, dict): + if field.message_type.full_name == _StructFullTypeName: + field_copy.Clear() + if len(field_value) == 1 and 'fields' in field_value: + try: + field_copy.update(field_value) + except: + # Fall back to init normal message field + field_copy.Clear() + new_val = field.message_type._concrete_class(**field_value) + else: + field_copy.update(field_value) + else: + new_val = field.message_type._concrete_class(**field_value) + elif hasattr(field_copy, '_internal_assign'): + field_copy._internal_assign(field_value) + else: + raise TypeError( + 'Message field {0}.{1} must be initialized with a ' + 'dict or instance of same class, got {2}.'.format( + message_descriptor.name, + field_name, + type(field_value).__name__, + ) + ) + + if new_val != None: + try: + field_copy.MergeFrom(new_val) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + self._fields[field] = field_copy + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: + field_value = _GetIntegerEnumValue(field.enum_type, field_value) + try: + 
setattr(self, field_name, field_value) + except TypeError: + _ReraiseTypeErrorWithFieldName(message_descriptor.name, field_name) + + init.__module__ = None + init.__doc__ = None + cls.__init__ = init + + +def _GetFieldByName(message_descriptor, field_name): + """Returns a field descriptor by field name. + + Args: + message_descriptor: A Descriptor describing all fields in message. + field_name: The name of the field to retrieve. + Returns: + The field descriptor associated with the field name. + """ + try: + return message_descriptor.fields_by_name[field_name] + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + +def _AddPropertiesForFields(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + for field in descriptor.fields: + _AddPropertiesForField(field, cls) + + if descriptor.is_extendable: + # _ExtensionDict is just an adaptor with no state so we allocate a new one + # every time it is accessed. + cls.Extensions = property(lambda self: _ExtensionDict(self)) + + +def _AddPropertiesForField(field, cls): + """Adds a public property for a protocol message field. + Clients can use this property to get and (in the case + of non-repeated scalar fields) directly set the value + of a protocol message field. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # Catch it if we add other types that we should + # handle specially here. 
+ assert _FieldDescriptor.MAX_CPPTYPE == 10 + + constant_name = field.name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, field.number) + + if field.label == _FieldDescriptor.LABEL_REPEATED: + _AddPropertiesForRepeatedField(field, cls) + elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + _AddPropertiesForNonRepeatedCompositeField(field, cls) + else: + _AddPropertiesForNonRepeatedScalarField(field, cls) + + +class _FieldProperty(property): + __slots__ = ('DESCRIPTOR',) + + def __init__(self, descriptor, getter, setter, doc): + property.__init__(self, getter, setter, doc=doc) + self.DESCRIPTOR = descriptor + + +def _AddPropertiesForRepeatedField(field, cls): + """Adds a public property for a "repeated" protocol message field. Clients + can use this property to get the value of the field, which will be either a + RepeatedScalarFieldContainer or RepeatedCompositeFieldContainer (see + below). + + Note that when clients add values to these containers, we perform + type-checking in the case of repeated scalar fields, and we also set any + necessary "has" bits as a side-effect. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' 
% proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. + def setter(self, new_value): + raise AttributeError('Assignment not allowed to repeated field ' + '"%s" in protocol message object.' % proto_field_name) + + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedScalarField(field, cls): + """Adds a public property for a nonrepeated, scalar protocol message field. + Clients can use this property to get and directly set the value of the field. + Note that when the client sets the value of a field by using this property, + all necessary "has" bits are set as a side-effect, and we also perform + type-checking. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + type_checker = type_checkers.GetTypeChecker(field) + default_value = field.default_value + + def getter(self): + # TODO: This may be broken since there may not be + # default_value. Combine with has_default_value somehow. + return self._fields.get(field, default_value) + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + def field_setter(self, new_value): + # pylint: disable=protected-access + # Testing the value for truthiness captures all of the proto3 defaults + # (0, 0.0, enum 0, and False). + try: + new_value = type_checker.CheckValue(new_value) + except TypeError as e: + raise TypeError( + 'Cannot set %s to %.1024r: %s' % (field.full_name, new_value, e)) + if not field.has_presence and not new_value: + self._fields.pop(field, None) + else: + self._fields[field] = new_value + # Check _cached_byte_size_dirty inline to improve performance, since scalar + # setters are called frequently. 
+ if not self._cached_byte_size_dirty: + self._Modified() + + if field.containing_oneof: + def setter(self, new_value): + field_setter(self, new_value) + self._UpdateOneofState(field) + else: + setter = field_setter + + setter.__module__ = None + setter.__doc__ = 'Setter for %s.' % proto_field_name + + # Add a property to encapsulate the getter/setter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForNonRepeatedCompositeField(field, cls): + """Adds a public property for a nonrepeated, composite protocol message field. + A composite field is a "group" or "message" field. + + Clients can use this property to get the value of the field, but cannot + assign to the property directly. + + Args: + field: A FieldDescriptor for this field. + cls: The class we're constructing. + """ + # TODO: Remove duplication with similar method + # for non-repeated scalars. + proto_field_name = field.name + property_name = _PropertyName(proto_field_name) + + def getter(self): + field_value = self._fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + + # Atomically check if another thread has preempted us and, if not, swap + # in the new object we just created. If someone has preempted us, we + # take that object and discard ours. + # WARNING: We are relying on setdefault() being atomic. This is true + # in CPython but we haven't investigated others. This warning appears + # in several other locations in this file. + field_value = self._fields.setdefault(field, field_value) + return field_value + getter.__module__ = None + getter.__doc__ = 'Getter for %s.' % proto_field_name + + # We define a setter just so we can throw an exception with a more + # helpful error message. 
+ def setter(self, new_value): + if field.message_type.full_name == 'google.protobuf.Timestamp': + getter(self) + self._fields[field].FromDatetime(new_value) + elif field.message_type.full_name == 'google.protobuf.Duration': + getter(self) + self._fields[field].FromTimedelta(new_value) + elif field.message_type.full_name == _StructFullTypeName: + getter(self) + self._fields[field].Clear() + self._fields[field].update(new_value) + elif field.message_type.full_name == _ListValueFullTypeName: + getter(self) + self._fields[field].Clear() + self._fields[field].extend(new_value) + else: + raise AttributeError( + 'Assignment not allowed to composite field ' + '"%s" in protocol message object.' % proto_field_name + ) + + # Add a property to encapsulate the getter. + doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name + setattr(cls, property_name, _FieldProperty(field, getter, setter, doc=doc)) + + +def _AddPropertiesForExtensions(descriptor, cls): + """Adds properties for all fields in this protocol message type.""" + extensions = descriptor.extensions_by_name + for extension_name, extension_field in extensions.items(): + constant_name = extension_name.upper() + '_FIELD_NUMBER' + setattr(cls, constant_name, extension_field.number) + + # TODO: Migrate all users of these attributes to functions like + # pool.FindExtensionByNumber(descriptor). + if descriptor.file is not None: + # TODO: Use cls.MESSAGE_FACTORY.pool when available. 
+ pool = descriptor.file.pool + +def _AddStaticMethods(cls): + def FromString(s): + message = cls() + message.MergeFromString(s) + return message + cls.FromString = staticmethod(FromString) + + +def _IsPresent(item): + """Given a (FieldDescriptor, value) tuple from _fields, return true if the + value should be included in the list returned by ListFields().""" + + if item[0].label == _FieldDescriptor.LABEL_REPEATED: + return bool(item[1]) + elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + return item[1]._is_present_in_parent + else: + return True + + +def _AddListFieldsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ListFields(self): + all_fields = [item for item in self._fields.items() if _IsPresent(item)] + all_fields.sort(key = lambda item: item[0].number) + return all_fields + + cls.ListFields = ListFields + + +def _AddHasFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + hassable_fields = {} + for field in message_descriptor.fields: + if field.label == _FieldDescriptor.LABEL_REPEATED: + continue + # For proto3, only submessages and fields inside a oneof have presence. + if not field.has_presence: + continue + hassable_fields[field.name] = field + + # Has methods are supported for oneof descriptors. + for oneof in message_descriptor.oneofs: + hassable_fields[oneof.name] = oneof + + def HasField(self, field_name): + try: + field = hassable_fields[field_name] + except KeyError as exc: + raise ValueError('Protocol message %s has no non-repeated field "%s" ' + 'nor has presence is not available for this field.' 
% ( + message_descriptor.full_name, field_name)) from exc + + if isinstance(field, descriptor_mod.OneofDescriptor): + try: + return HasField(self, self._oneofs[field].name) + except KeyError: + return False + else: + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field) + return value is not None and value._is_present_in_parent + else: + return field in self._fields + + cls.HasField = HasField + + +def _AddClearFieldMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def ClearField(self, field_name): + try: + field = message_descriptor.fields_by_name[field_name] + except KeyError: + try: + field = message_descriptor.oneofs_by_name[field_name] + if field in self._oneofs: + field = self._oneofs[field] + else: + return + except KeyError: + raise ValueError('Protocol message %s has no "%s" field.' % + (message_descriptor.name, field_name)) + + if field in self._fields: + # To match the C++ implementation, we need to invalidate iterators + # for map fields when ClearField() happens. + if hasattr(self._fields[field], 'InvalidateIterators'): + self._fields[field].InvalidateIterators() + + # Note: If the field is a sub-message, its listener will still point + # at us. That's fine, because the worst than can happen is that it + # will call _Modified() and invalidate our byte size. Big deal. + del self._fields[field] + + if self._oneofs.get(field.containing_oneof, None) is field: + del self._oneofs[field.containing_oneof] + + # Always call _Modified() -- even if nothing was changed, this is + # a mutating method, and thus calling it should cause the field to become + # present in the parent message. + self._Modified() + + cls.ClearField = ClearField + + +def _AddClearExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def ClearExtension(self, field_descriptor): + extension_dict._VerifyExtensionHandle(self, field_descriptor) + + # Similar to ClearField(), above. 
+ if field_descriptor in self._fields: + del self._fields[field_descriptor] + self._Modified() + cls.ClearExtension = ClearExtension + + +def _AddHasExtensionMethod(cls): + """Helper for _AddMessageMethods().""" + def HasExtension(self, field_descriptor): + extension_dict._VerifyExtensionHandle(self, field_descriptor) + if field_descriptor.label == _FieldDescriptor.LABEL_REPEATED: + raise KeyError('"%s" is repeated.' % field_descriptor.full_name) + + if field_descriptor.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + value = self._fields.get(field_descriptor) + return value is not None and value._is_present_in_parent + else: + return field_descriptor in self._fields + cls.HasExtension = HasExtension + +def _InternalUnpackAny(msg): + """Unpacks Any message and returns the unpacked message. + + This internal method is different from public Any Unpack method which takes + the target message as argument. _InternalUnpackAny method does not have + target message type and need to find the message type in descriptor pool. + + Args: + msg: An Any message to be unpacked. + + Returns: + The unpacked message. + """ + # TODO: Don't use the factory of generated messages. + # To make Any work with custom factories, use the message factory of the + # parent message. + # pylint: disable=g-import-not-at-top + from google.protobuf import symbol_database + factory = symbol_database.Default() + + type_url = msg.type_url + + if not type_url: + return None + + # TODO: For now we just strip the hostname. Better logic will be + # required. + type_name = type_url.split('/')[-1] + descriptor = factory.pool.FindMessageTypeByName(type_name) + + if descriptor is None: + return None + + # Unable to import message_factory at top because of circular import. 
+ # pylint: disable=g-import-not-at-top + from google.protobuf import message_factory + message_class = message_factory.GetMessageClass(descriptor) + message = message_class() + + message.ParseFromString(msg.value) + return message + + +def _AddEqualsMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __eq__(self, other): + if self.DESCRIPTOR.full_name == _ListValueFullTypeName and isinstance( + other, list + ): + return self._internal_compare(other) + if self.DESCRIPTOR.full_name == _StructFullTypeName and isinstance( + other, dict + ): + return self._internal_compare(other) + + if (not isinstance(other, message_mod.Message) or + other.DESCRIPTOR != self.DESCRIPTOR): + return NotImplemented + + if self is other: + return True + + if self.DESCRIPTOR.full_name == _AnyFullTypeName: + any_a = _InternalUnpackAny(self) + any_b = _InternalUnpackAny(other) + if any_a and any_b: + return any_a == any_b + + if not self.ListFields() == other.ListFields(): + return False + + # TODO: Fix UnknownFieldSet to consider MessageSet extensions, + # then use it for the comparison. 
+ unknown_fields = list(self._unknown_fields) + unknown_fields.sort() + other_unknown_fields = list(other._unknown_fields) + other_unknown_fields.sort() + return unknown_fields == other_unknown_fields + + cls.__eq__ = __eq__ + + +def _AddStrMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __str__(self): + return text_format.MessageToString(self) + cls.__str__ = __str__ + + +def _AddReprMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def __repr__(self): + return text_format.MessageToString(self) + cls.__repr__ = __repr__ + + +def _AddUnicodeMethod(unused_message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def __unicode__(self): + return text_format.MessageToString(self, as_utf8=True).decode('utf-8') + cls.__unicode__ = __unicode__ + + +def _AddContainsMethod(message_descriptor, cls): + + if message_descriptor.full_name == 'google.protobuf.Struct': + def __contains__(self, key): + return key in self.fields + elif message_descriptor.full_name == 'google.protobuf.ListValue': + def __contains__(self, value): + return value in self.items() + else: + def __contains__(self, field): + return self.HasField(field) + + cls.__contains__ = __contains__ + + +def _BytesForNonRepeatedElement(value, field_number, field_type): + """Returns the number of bytes needed to serialize a non-repeated element. + The returned byte count includes space for tag information and any + other additional space associated with serializing value. + + Args: + value: Value we're serializing. + field_number: Field number of this value. (Since the field number + is stored as part of a varint-encoded tag, this has an impact + on the total bytes required to serialize the value). + field_type: The type of the field. One of the TYPE_* constants + within FieldDescriptor. 
+ """ + try: + fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type] + return fn(field_number, value) + except KeyError: + raise message_mod.EncodeError('Unrecognized field type: %d' % field_type) + + +def _AddByteSizeMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def ByteSize(self): + if not self._cached_byte_size_dirty: + return self._cached_byte_size + + size = 0 + descriptor = self.DESCRIPTOR + if descriptor._is_map_entry: + # Fields of map entry should always be serialized. + key_field = descriptor.fields_by_name['key'] + _MaybeAddEncoder(cls, key_field) + size = key_field._sizer(self.key) + value_field = descriptor.fields_by_name['value'] + _MaybeAddEncoder(cls, value_field) + size += value_field._sizer(self.value) + else: + for field_descriptor, field_value in self.ListFields(): + _MaybeAddEncoder(cls, field_descriptor) + size += field_descriptor._sizer(field_value) + for tag_bytes, value_bytes in self._unknown_fields: + size += len(tag_bytes) + len(value_bytes) + + self._cached_byte_size = size + self._cached_byte_size_dirty = False + self._listener_for_children.dirty = False + return size + + cls.ByteSize = ByteSize + + +def _AddSerializeToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializeToString(self, **kwargs): + # Check if the message has all of its required fields set. 
+ if not self.IsInitialized(): + raise message_mod.EncodeError( + 'Message %s is missing required fields: %s' % ( + self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors()))) + return self.SerializePartialToString(**kwargs) + cls.SerializeToString = SerializeToString + + +def _AddSerializePartialToStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + + def SerializePartialToString(self, **kwargs): + out = BytesIO() + self._InternalSerialize(out.write, **kwargs) + return out.getvalue() + cls.SerializePartialToString = SerializePartialToString + + def InternalSerialize(self, write_bytes, deterministic=None): + if deterministic is None: + deterministic = ( + api_implementation.IsPythonDefaultSerializationDeterministic()) + else: + deterministic = bool(deterministic) + + descriptor = self.DESCRIPTOR + if descriptor._is_map_entry: + # Fields of map entry should always be serialized. + key_field = descriptor.fields_by_name['key'] + _MaybeAddEncoder(cls, key_field) + key_field._encoder(write_bytes, self.key, deterministic) + value_field = descriptor.fields_by_name['value'] + _MaybeAddEncoder(cls, value_field) + value_field._encoder(write_bytes, self.value, deterministic) + else: + for field_descriptor, field_value in self.ListFields(): + _MaybeAddEncoder(cls, field_descriptor) + field_descriptor._encoder(write_bytes, field_value, deterministic) + for tag_bytes, value_bytes in self._unknown_fields: + write_bytes(tag_bytes) + write_bytes(value_bytes) + cls._InternalSerialize = InternalSerialize + + +def _AddMergeFromStringMethod(message_descriptor, cls): + """Helper for _AddMessageMethods().""" + def MergeFromString(self, serialized): + serialized = memoryview(serialized) + length = len(serialized) + try: + if self._InternalParse(serialized, 0, length) != length: + # The only reason _InternalParse would return early is if it + # encountered an end-group tag. 
+ raise message_mod.DecodeError('Unexpected end-group tag.') + except (IndexError, TypeError): + # Now ord(buf[p:p+1]) == ord('') gets TypeError. + raise message_mod.DecodeError('Truncated message.') + except struct.error as e: + raise message_mod.DecodeError(e) + return length # Return this for legacy reasons. + cls.MergeFromString = MergeFromString + + local_ReadTag = decoder.ReadTag + local_SkipField = decoder.SkipField + fields_by_tag = cls._fields_by_tag + message_set_decoders_by_tag = cls._message_set_decoders_by_tag + + def InternalParse(self, buffer, pos, end): + """Create a message from serialized bytes. + + Args: + self: Message, instance of the proto message object. + buffer: memoryview of the serialized data. + pos: int, position to start in the serialized data. + end: int, end position of the serialized data. + + Returns: + Message object. + """ + # Guard against internal misuse, since this function is called internally + # quite extensively, and its easy to accidentally pass bytes. + assert isinstance(buffer, memoryview) + self._Modified() + field_dict = self._fields + while pos != end: + (tag_bytes, new_pos) = local_ReadTag(buffer, pos) + field_decoder, field_des = message_set_decoders_by_tag.get( + tag_bytes, (None, None) + ) + if field_decoder: + pos = field_decoder(buffer, new_pos, end, self, field_dict) + continue + field_des, is_packed = fields_by_tag.get(tag_bytes, (None, None)) + if field_des is None: + if not self._unknown_fields: # pylint: disable=protected-access + self._unknown_fields = [] # pylint: disable=protected-access + # pylint: disable=protected-access + (tag, _) = decoder._DecodeVarint(tag_bytes, 0) + field_number, wire_type = wire_format.UnpackTag(tag) + if field_number == 0: + raise message_mod.DecodeError('Field number 0 is illegal.') + # TODO: remove old_pos. 
+ old_pos = new_pos + (data, new_pos) = decoder._DecodeUnknownField( + buffer, new_pos, wire_type) # pylint: disable=protected-access + if new_pos == -1: + return pos + # TODO: remove _unknown_fields. + new_pos = local_SkipField(buffer, old_pos, end, tag_bytes) + if new_pos == -1: + return pos + self._unknown_fields.append( + (tag_bytes, buffer[old_pos:new_pos].tobytes())) + pos = new_pos + else: + _MaybeAddDecoder(cls, field_des) + field_decoder = field_des._decoders[is_packed] + pos = field_decoder(buffer, new_pos, end, self, field_dict) + if field_des.containing_oneof: + self._UpdateOneofState(field_des) + return pos + cls._InternalParse = InternalParse + + +def _AddIsInitializedMethod(message_descriptor, cls): + """Adds the IsInitialized and FindInitializationError methods to the + protocol message class.""" + + required_fields = [field for field in message_descriptor.fields + if field.label == _FieldDescriptor.LABEL_REQUIRED] + + def IsInitialized(self, errors=None): + """Checks if all required fields of a message are set. + + Args: + errors: A list which, if provided, will be populated with the field + paths of all missing required fields. + + Returns: + True iff the specified message has all required fields set. + """ + + # Performance is critical so we avoid HasField() and ListFields(). + + for field in required_fields: + if (field not in self._fields or + (field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and + not self._fields[field]._is_present_in_parent)): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + for field, value in list(self._fields.items()): # dict can change size! 
+ if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.label == _FieldDescriptor.LABEL_REPEATED: + if (field.message_type._is_map_entry): + continue + for element in value: + if not element.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + elif value._is_present_in_parent and not value.IsInitialized(): + if errors is not None: + errors.extend(self.FindInitializationErrors()) + return False + + return True + + cls.IsInitialized = IsInitialized + + def FindInitializationErrors(self): + """Finds required fields which are not initialized. + + Returns: + A list of strings. Each string is a path to an uninitialized field from + the top-level message, e.g. "foo.bar[5].baz". + """ + + errors = [] # simplify things + + for field in required_fields: + if not self.HasField(field.name): + errors.append(field.name) + + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if field.is_extension: + name = '(%s)' % field.full_name + else: + name = field.name + + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + element = value[key] + prefix = '%s[%s].' % (name, key) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + # ScalarMaps can't have any initialization errors. + pass + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for i in range(len(value)): + element = value[i] + prefix = '%s[%d].' % (name, i) + sub_errors = element.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + else: + prefix = name + '.' 
+ sub_errors = value.FindInitializationErrors() + errors += [prefix + error for error in sub_errors] + + return errors + + cls.FindInitializationErrors = FindInitializationErrors + + +def _FullyQualifiedClassName(klass): + module = klass.__module__ + name = getattr(klass, '__qualname__', klass.__name__) + if module in (None, 'builtins', '__builtin__'): + return name + return module + '.' + name + + +def _AddMergeFromMethod(cls): + LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED + CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE + + def MergeFrom(self, msg): + if not isinstance(msg, cls): + raise TypeError( + 'Parameter to MergeFrom() must be instance of same class: ' + 'expected %s got %s.' % (_FullyQualifiedClassName(cls), + _FullyQualifiedClassName(msg.__class__))) + + assert msg is not self + self._Modified() + + fields = self._fields + + for field, value in msg._fields.items(): + if field.label == LABEL_REPEATED: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. + field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + elif field.cpp_type == CPPTYPE_MESSAGE: + if value._is_present_in_parent: + field_value = fields.get(field) + if field_value is None: + # Construct a new object to represent this field. 
+ field_value = field._default_constructor(self) + fields[field] = field_value + field_value.MergeFrom(value) + else: + self._fields[field] = value + if field.containing_oneof: + self._UpdateOneofState(field) + + if msg._unknown_fields: + if not self._unknown_fields: + self._unknown_fields = [] + self._unknown_fields.extend(msg._unknown_fields) + + cls.MergeFrom = MergeFrom + + +def _AddWhichOneofMethod(message_descriptor, cls): + def WhichOneof(self, oneof_name): + """Returns the name of the currently set field inside a oneof, or None.""" + try: + field = message_descriptor.oneofs_by_name[oneof_name] + except KeyError: + raise ValueError( + 'Protocol message has no oneof "%s" field.' % oneof_name) + + nested_field = self._oneofs.get(field, None) + if nested_field is not None and self.HasField(nested_field.name): + return nested_field.name + else: + return None + + cls.WhichOneof = WhichOneof + + +def _Clear(self): + # Clear fields. + self._fields = {} + self._unknown_fields = () + + self._oneofs = {} + self._Modified() + + +def _UnknownFields(self): + raise NotImplementedError('Please use the add-on feaure ' + 'unknown_fields.UnknownFieldSet(message) in ' + 'unknown_fields.py instead.') + + +def _DiscardUnknownFields(self): + self._unknown_fields = [] + for field, value in self.ListFields(): + if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: + if _IsMapField(field): + if _IsMessageMapField(field): + for key in value: + value[key].DiscardUnknownFields() + elif field.label == _FieldDescriptor.LABEL_REPEATED: + for sub_message in value: + sub_message.DiscardUnknownFields() + else: + value.DiscardUnknownFields() + + +def _SetListener(self, listener): + if listener is None: + self._listener = message_listener_mod.NullMessageListener() + else: + self._listener = listener + + +def _AddMessageMethods(message_descriptor, cls): + """Adds implementations of all Message methods to cls.""" + _AddListFieldsMethod(message_descriptor, cls) + 
_AddHasFieldMethod(message_descriptor, cls) + _AddClearFieldMethod(message_descriptor, cls) + if message_descriptor.is_extendable: + _AddClearExtensionMethod(cls) + _AddHasExtensionMethod(cls) + _AddEqualsMethod(message_descriptor, cls) + _AddStrMethod(message_descriptor, cls) + _AddReprMethod(message_descriptor, cls) + _AddUnicodeMethod(message_descriptor, cls) + _AddContainsMethod(message_descriptor, cls) + _AddByteSizeMethod(message_descriptor, cls) + _AddSerializeToStringMethod(message_descriptor, cls) + _AddSerializePartialToStringMethod(message_descriptor, cls) + _AddMergeFromStringMethod(message_descriptor, cls) + _AddIsInitializedMethod(message_descriptor, cls) + _AddMergeFromMethod(cls) + _AddWhichOneofMethod(message_descriptor, cls) + # Adds methods which do not depend on cls. + cls.Clear = _Clear + cls.DiscardUnknownFields = _DiscardUnknownFields + cls._SetListener = _SetListener + + +def _AddPrivateHelperMethods(message_descriptor, cls): + """Adds implementation of private helper methods to cls.""" + + def Modified(self): + """Sets the _cached_byte_size_dirty bit to true, + and propagates this to our listener iff this was a state change. + """ + + # Note: Some callers check _cached_byte_size_dirty before calling + # _Modified() as an extra optimization. So, if this method is ever + # changed such that it does stuff even when _cached_byte_size_dirty is + # already true, the callers need to be updated. + if not self._cached_byte_size_dirty: + self._cached_byte_size_dirty = True + self._listener_for_children.dirty = True + self._is_present_in_parent = True + self._listener.Modified() + + def _UpdateOneofState(self, field): + """Sets field as the active field in its containing oneof. + + Will also delete currently active field in the oneof, if it is different + from the argument. Does not mark the message as modified. 
+ """ + other_field = self._oneofs.setdefault(field.containing_oneof, field) + if other_field is not field: + del self._fields[other_field] + self._oneofs[field.containing_oneof] = field + + cls._Modified = Modified + cls.SetInParent = Modified + cls._UpdateOneofState = _UpdateOneofState + + +class _Listener(object): + + """MessageListener implementation that a parent message registers with its + child message. + + In order to support semantics like: + + foo.bar.baz.moo = 23 + assert foo.HasField('bar') + + ...child objects must have back references to their parents. + This helper class is at the heart of this support. + """ + + def __init__(self, parent_message): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + """ + # This listener establishes a back reference from a child (contained) object + # to its parent (containing) object. We make this a weak reference to avoid + # creating cyclic garbage when the client finishes with the 'parent' object + # in the tree. + if isinstance(parent_message, weakref.ProxyType): + self._parent_message_weakref = parent_message + else: + self._parent_message_weakref = weakref.proxy(parent_message) + + # As an optimization, we also indicate directly on the listener whether + # or not the parent message is dirty. This way we can avoid traversing + # up the tree in the common case. + self.dirty = False + + def Modified(self): + if self.dirty: + return + try: + # Propagate the signal to our parents iff this is the first field set. + self._parent_message_weakref._Modified() + except ReferenceError: + # We can get here if a client has kept a reference to a child object, + # and is now setting a field on it, but the child's parent has been + # garbage-collected. This is not an error. 
+ pass + + +class _OneofListener(_Listener): + """Special listener implementation for setting composite oneof fields.""" + + def __init__(self, parent_message, field): + """Args: + parent_message: The message whose _Modified() method we should call when + we receive Modified() messages. + field: The descriptor of the field being set in the parent message. + """ + super(_OneofListener, self).__init__(parent_message) + self._field = field + + def Modified(self): + """Also updates the state of the containing oneof in the parent message.""" + try: + self._parent_message_weakref._UpdateOneofState(self._field) + super(_OneofListener, self).Modified() + except ReferenceError: + pass diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/internal/type_checkers.py b/.venv/lib/python3.11/site-packages/google/protobuf/internal/type_checkers.py new file mode 100644 index 0000000000000000000000000000000000000000..04ccc985001850e3eca0da37aa95435090a4c619 --- /dev/null +++ b/.venv/lib/python3.11/site-packages/google/protobuf/internal/type_checkers.py @@ -0,0 +1,408 @@ +# Protocol Buffers - Google's data interchange format +# Copyright 2008 Google Inc. All rights reserved. +# +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file or at +# https://developers.google.com/open-source/licenses/bsd + +"""Provides type checking routines. + +This module defines type checking utilities in the forms of dictionaries: + +VALUE_CHECKERS: A dictionary of field types and a value validation object. +TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing + function. +TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization + function. +FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their + corresponding wire types. +TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization + function. 
+""" + +__author__ = 'robinson@google.com (Will Robinson)' + +import struct +import numbers + +from google.protobuf.internal import decoder +from google.protobuf.internal import encoder +from google.protobuf.internal import wire_format +from google.protobuf import descriptor + +_FieldDescriptor = descriptor.FieldDescriptor + + +def TruncateToFourByteFloat(original): + return struct.unpack(' _FLOAT_MAX: + return _INF + if converted_value < _FLOAT_MIN: + return _NEG_INF + + return TruncateToFourByteFloat(converted_value) + +# Type-checkers for all scalar CPPTYPEs. +_VALUE_CHECKERS = { + _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), + _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), + _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), + _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), + _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), + _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), + _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), +} + + +# Map from field type to a function F, such that F(field_num, value) +# gives the total byte size for a value of the given type. This +# byte size includes tag information and any other additional space +# associated with serializing "value". 
+TYPE_TO_BYTE_SIZE_FN = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, + _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, + _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, + _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, + _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, + _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, + _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, + _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, + _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, + _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, + _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, + _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, + _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, + _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, + _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, + _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, + _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, + _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize + } + + +# Maps from field types to encoder constructors. 
+TYPE_TO_ENCODER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, + _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, + _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, + _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, + _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, + _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, + _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, + _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, + } + + +# Maps from field types to sizer constructors. 
+TYPE_TO_SIZER = { + _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, + _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, + _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, + _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, + _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, + _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, + _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, + _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, + _FieldDescriptor.TYPE_STRING: encoder.StringSizer, + _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, + _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, + _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, + _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, + _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, + _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, + _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, + _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, + _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, + } + + +# Maps from field type to a decoder constructor. 
+TYPE_TO_DECODER = { + _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, + _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, + _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, + _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, + _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, + _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, + _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, + _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, + _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, + _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, + _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, + _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, + _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, + _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, + _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, + _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, + _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, + _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, + } + +# Maps from field type to expected wiretype. 
+FIELD_TYPE_TO_WIRE_TYPE = { + _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_STRING: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, + _FieldDescriptor.TYPE_MESSAGE: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_BYTES: + wire_format.WIRETYPE_LENGTH_DELIMITED, + _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, + _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, + _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, + _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, + } diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/testdata/__init__.py b/.venv/lib/python3.11/site-packages/google/protobuf/testdata/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/testdata/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/testdata/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0587f6c4dd51ca7e32c3b4135919489c55c5c34d Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/testdata/__pycache__/__init__.cpython-311.pyc differ diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/util/__init__.py 
b/.venv/lib/python3.11/site-packages/google/protobuf/util/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/.venv/lib/python3.11/site-packages/google/protobuf/util/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/google/protobuf/util/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..964a474fbdb5de8f51fcaf2a80a2502d29e78420 Binary files /dev/null and b/.venv/lib/python3.11/site-packages/google/protobuf/util/__pycache__/__init__.cpython-311.pyc differ