repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
googleapis/google-api-java-client-services
35,559
clients/google-api-services-compute/v1/2.0.0/com/google/api/services/compute/model/TargetHttpsProxy.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.compute.model; /** * Represents a Target HTTPS Proxy resource. * * Google Compute Engine has two Target HTTPS Proxy resources: * * * [Global](/compute/docs/reference/rest/v1/targetHttpsProxies) * * [Regional](/compute/docs/reference/rest/v1/regionTargetHttpsProxies) * * A target HTTPS proxy is a component of Google Cloud HTTPS load balancers. * * * targetHttpsProxies are used by global external Application Load Balancers, classic * Application Load Balancers, cross-region internal Application Load Balancers, and Traffic * Director. * regionTargetHttpsProxies are used by regional internal Application Load Balancers * and regional external Application Load Balancers. * * Forwarding rules reference a target HTTPS proxy, and the target proxy then references a URL map. * For more information, readUsing Target Proxies and Forwarding rule concepts. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. 
*/ @SuppressWarnings("javadoc") public final class TargetHttpsProxy extends com.google.api.client.json.GenericJson { /** * Optional. A URL referring to a networksecurity.AuthorizationPolicy resource that describes how * the proxy should authorize inbound traffic. If left blank, access will not be restricted by an * authorization policy. * * Refer to the AuthorizationPolicy resource for additional details. * * authorizationPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules * with theloadBalancingScheme set to INTERNAL_SELF_MANAGED. * * Note: This field currently has no impact. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String authorizationPolicy; /** * URL of a certificate map that identifies a certificate map associated with the given target * proxy. This field can only be set for Global external Application Load Balancer or Classic * Application Load Balancer. For other products use Certificate Manager Certificates instead. * * If set, sslCertificates will be ignored. * * Accepted format is//certificatemanager.googleapis.com/projects/{project}/locations/{location}/ * certificateMaps/{resourceName}. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String certificateMap; /** * [Output Only] Creation timestamp inRFC3339 text format. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String creationTimestamp; /** * An optional description of this resource. Provide this property when you create the resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String description; /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpsProxy. 
An up-to- * date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpsProxy. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String fingerprint; /** * Specifies how long to keep a connection open, after completing a response, while there is no * matching traffic (in seconds). If an HTTP keep-alive is not specified, a default value (610 * seconds) will be used. * * For global external Application Load Balancers, the minimum allowed value is 5 seconds and the * maximum allowed value is 1200 seconds. * * For classic Application Load Balancers, this option is not supported. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer httpKeepAliveTimeoutSec; /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.math.BigInteger id; /** * [Output Only] Type of resource. Alwayscompute#targetHttpsProxy for target HTTPS proxies. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply withRFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String name; /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set toINTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean proxyBind; /** * Specifies the QUIC override policy for this TargetHttpsProxy resource. This setting determines * whether the load balancer attempts to negotiate QUIC with clients. You can specify NONE, * ENABLE, orDISABLE. - When quic-override is set to NONE, Google manages whether QUIC * is used. - When quic-override is set to ENABLE, the load balancer uses QUIC when * possible. - When quic-override is set to DISABLE, the load balancer doesn't use QUIC. * - If the quic-override flag is not specified,NONE is implied. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String quicOverride; /** * [Output Only] URL of the region where the regional TargetHttpsProxy resides. This field is not * applicable to global TargetHttpsProxies. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String region; /** * [Output Only] Server-defined URL for the resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selfLink; /** * Optional. A URL referring to a networksecurity.ServerTlsPolicy resource that describes how the * proxy should authenticate inbound traffic. 
* * serverTlsPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules with * theloadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL orEXTERNAL_MANAGED or * INTERNAL_MANAGED. It also applies to a regional TargetHttpsProxy attached to regional * forwardingRules with theloadBalancingScheme set to EXTERNAL_MANAGED orINTERNAL_MANAGED. For * details whichServerTlsPolicy resources are accepted withINTERNAL_SELF_MANAGED and which with * EXTERNAL,INTERNAL_MANAGED, EXTERNAL_MANAGEDloadBalancingScheme consult ServerTlsPolicy * documentation. * * If left blank, communications are not encrypted. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String serverTlsPolicy; /** * URLs to SslCertificate resources that are used to authenticate connections between users and * the load balancer. At least one SSL certificate must be specified. SslCertificates do not apply * when the load balancing scheme is set to INTERNAL_SELF_MANAGED. * * The URLs should refer to a SSL Certificate resource or Certificate Manager Certificate * resource. Mixing Classic Certificates and Certificate Manager Certificates is not allowed. * Certificate Manager Certificates must include the certificatemanager API namespace. Using * Certificate Manager Certificates in this field is not supported by Global external Application * Load Balancer or Classic Application Load Balancer, use certificate_map instead. * * Currently, you may specify up to 15 Classic SSL Certificates or up to 100 Certificate Manager * Certificates. * * Certificate Manager Certificates accepted formats are: - //certificatemanager.googleapis * .com/projects/{project}/locations/{location}/certificates/{resourceName}. - https://certific * atemanager.googleapis.com/v1alpha1/projects/{project}/locations/{location}/certificates/{resour * ceName}. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<java.lang.String> sslCertificates; /** * URL of SslPolicy resource that will be associated with the TargetHttpsProxy resource. If not * set, the TargetHttpsProxy resource has no SSL policy configured. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sslPolicy; /** * Specifies whether TLS 1.3 0-RTT Data ("Early Data") should be accepted for this service. Early * Data allows a TLS resumption handshake to include the initial application payload (a HTTP * request) alongside the handshake, reducing the effective round trips to "zero". This applies to * TLS 1.3 connections over TCP (HTTP/2) as well as over UDP (QUIC/h3). * * This can improve application performance, especially on networks where interruptions may be * common, such as on mobile. * * Requests with Early Data will have the "Early-Data" HTTP header set on the request, with a * value of "1", to allow the backend to determine whether Early Data was included. * * Note: TLS Early Data may allow requests to be replayed, as the data is sent to the backend * before the handshake has fully completed. Applications that allow idempotent HTTP methods to * make non-idempotent changes, such as a GET request updating a database, should not accept Early * Data on those requests, and reject requests with the "Early-Data: 1" HTTP header by returning a * HTTP 425 (Too Early) status code, in order to remain RFC compliant. * * The default value is DISABLED. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String tlsEarlyData; /** * A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from URL * to the BackendService. 
For example, the following are all valid URLs for specifying a URL map: * - https://www.googleapis.compute/v1/projects/project/global/urlMaps/url-map - * projects/project/global/urlMaps/url-map - global/urlMaps/url-map * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String urlMap; /** * Optional. A URL referring to a networksecurity.AuthorizationPolicy resource that describes how * the proxy should authorize inbound traffic. If left blank, access will not be restricted by an * authorization policy. * * Refer to the AuthorizationPolicy resource for additional details. * * authorizationPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules * with theloadBalancingScheme set to INTERNAL_SELF_MANAGED. * * Note: This field currently has no impact. * @return value or {@code null} for none */ public java.lang.String getAuthorizationPolicy() { return authorizationPolicy; } /** * Optional. A URL referring to a networksecurity.AuthorizationPolicy resource that describes how * the proxy should authorize inbound traffic. If left blank, access will not be restricted by an * authorization policy. * * Refer to the AuthorizationPolicy resource for additional details. * * authorizationPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules * with theloadBalancingScheme set to INTERNAL_SELF_MANAGED. * * Note: This field currently has no impact. * @param authorizationPolicy authorizationPolicy or {@code null} for none */ public TargetHttpsProxy setAuthorizationPolicy(java.lang.String authorizationPolicy) { this.authorizationPolicy = authorizationPolicy; return this; } /** * URL of a certificate map that identifies a certificate map associated with the given target * proxy. This field can only be set for Global external Application Load Balancer or Classic * Application Load Balancer. For other products use Certificate Manager Certificates instead. * * If set, sslCertificates will be ignored. 
* * Accepted format is//certificatemanager.googleapis.com/projects/{project}/locations/{location}/ * certificateMaps/{resourceName}. * @return value or {@code null} for none */ public java.lang.String getCertificateMap() { return certificateMap; } /** * URL of a certificate map that identifies a certificate map associated with the given target * proxy. This field can only be set for Global external Application Load Balancer or Classic * Application Load Balancer. For other products use Certificate Manager Certificates instead. * * If set, sslCertificates will be ignored. * * Accepted format is//certificatemanager.googleapis.com/projects/{project}/locations/{location}/ * certificateMaps/{resourceName}. * @param certificateMap certificateMap or {@code null} for none */ public TargetHttpsProxy setCertificateMap(java.lang.String certificateMap) { this.certificateMap = certificateMap; return this; } /** * [Output Only] Creation timestamp inRFC3339 text format. * @return value or {@code null} for none */ public java.lang.String getCreationTimestamp() { return creationTimestamp; } /** * [Output Only] Creation timestamp inRFC3339 text format. * @param creationTimestamp creationTimestamp or {@code null} for none */ public TargetHttpsProxy setCreationTimestamp(java.lang.String creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; } /** * An optional description of this resource. Provide this property when you create the resource. * @return value or {@code null} for none */ public java.lang.String getDescription() { return description; } /** * An optional description of this resource. Provide this property when you create the resource. * @param description description or {@code null} for none */ public TargetHttpsProxy setDescription(java.lang.String description) { this.description = description; return this; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. 
This field will be ignored when inserting a TargetHttpsProxy. An up-to- * date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpsProxy. * @see #decodeFingerprint() * @return value or {@code null} for none */ public java.lang.String getFingerprint() { return fingerprint; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpsProxy. An up-to- * date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpsProxy. * @see #getFingerprint() * @return Base64 decoded value or {@code null} for none * * @since 1.14 */ public byte[] decodeFingerprint() { return com.google.api.client.util.Base64.decodeBase64(fingerprint); } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpsProxy. An up-to- * date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpsProxy. * @see #encodeFingerprint() * @param fingerprint fingerprint or {@code null} for none */ public TargetHttpsProxy setFingerprint(java.lang.String fingerprint) { this.fingerprint = fingerprint; return this; } /** * Fingerprint of this resource. A hash of the contents stored in this object. This field is used * in optimistic locking. This field will be ignored when inserting a TargetHttpsProxy. 
An up-to- * date fingerprint must be provided in order to patch the TargetHttpsProxy; otherwise, the * request will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() * request to retrieve the TargetHttpsProxy. * @see #setFingerprint() * * <p> * The value is encoded Base64 or {@code null} for none. * </p> * * @since 1.14 */ public TargetHttpsProxy encodeFingerprint(byte[] fingerprint) { this.fingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(fingerprint); return this; } /** * Specifies how long to keep a connection open, after completing a response, while there is no * matching traffic (in seconds). If an HTTP keep-alive is not specified, a default value (610 * seconds) will be used. * * For global external Application Load Balancers, the minimum allowed value is 5 seconds and the * maximum allowed value is 1200 seconds. * * For classic Application Load Balancers, this option is not supported. * @return value or {@code null} for none */ public java.lang.Integer getHttpKeepAliveTimeoutSec() { return httpKeepAliveTimeoutSec; } /** * Specifies how long to keep a connection open, after completing a response, while there is no * matching traffic (in seconds). If an HTTP keep-alive is not specified, a default value (610 * seconds) will be used. * * For global external Application Load Balancers, the minimum allowed value is 5 seconds and the * maximum allowed value is 1200 seconds. * * For classic Application Load Balancers, this option is not supported. * @param httpKeepAliveTimeoutSec httpKeepAliveTimeoutSec or {@code null} for none */ public TargetHttpsProxy setHttpKeepAliveTimeoutSec(java.lang.Integer httpKeepAliveTimeoutSec) { this.httpKeepAliveTimeoutSec = httpKeepAliveTimeoutSec; return this; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. 
* @return value or {@code null} for none */ public java.math.BigInteger getId() { return id; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * @param id id or {@code null} for none */ public TargetHttpsProxy setId(java.math.BigInteger id) { this.id = id; return this; } /** * [Output Only] Type of resource. Alwayscompute#targetHttpsProxy for target HTTPS proxies. * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * [Output Only] Type of resource. Alwayscompute#targetHttpsProxy for target HTTPS proxies. * @param kind kind or {@code null} for none */ public TargetHttpsProxy setKind(java.lang.String kind) { this.kind = kind; return this; } /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply withRFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Name of the resource. Provided by the client when the resource is created. The name must be * 1-63 characters long, and comply withRFC1035. Specifically, the name must be 1-63 characters * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first * character must be a lowercase letter, and all following characters must be a dash, lowercase * letter, or digit, except the last character, which cannot be a dash. 
* @param name name or {@code null} for none */ public TargetHttpsProxy setName(java.lang.String name) { this.name = name; return this; } /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set toINTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * @return value or {@code null} for none */ public java.lang.Boolean getProxyBind() { return proxyBind; } /** * This field only applies when the forwarding rule that references this target proxy has a * loadBalancingScheme set toINTERNAL_SELF_MANAGED. * * When this field is set to true, Envoy proxies set up inbound traffic interception and bind to * the IP address and port specified in the forwarding rule. This is generally useful when using * Traffic Director to configure Envoy as a gateway or middle proxy (in other words, not a sidecar * proxy). The Envoy proxy listens for inbound requests and handles requests when it receives * them. * * The default is false. * @param proxyBind proxyBind or {@code null} for none */ public TargetHttpsProxy setProxyBind(java.lang.Boolean proxyBind) { this.proxyBind = proxyBind; return this; } /** * Specifies the QUIC override policy for this TargetHttpsProxy resource. This setting determines * whether the load balancer attempts to negotiate QUIC with clients. You can specify NONE, * ENABLE, orDISABLE. - When quic-override is set to NONE, Google manages whether QUIC * is used. - When quic-override is set to ENABLE, the load balancer uses QUIC when * possible. - When quic-override is set to DISABLE, the load balancer doesn't use QUIC. 
* - If the quic-override flag is not specified,NONE is implied. * @return value or {@code null} for none */ public java.lang.String getQuicOverride() { return quicOverride; } /** * Specifies the QUIC override policy for this TargetHttpsProxy resource. This setting determines * whether the load balancer attempts to negotiate QUIC with clients. You can specify NONE, * ENABLE, orDISABLE. - When quic-override is set to NONE, Google manages whether QUIC * is used. - When quic-override is set to ENABLE, the load balancer uses QUIC when * possible. - When quic-override is set to DISABLE, the load balancer doesn't use QUIC. * - If the quic-override flag is not specified,NONE is implied. * @param quicOverride quicOverride or {@code null} for none */ public TargetHttpsProxy setQuicOverride(java.lang.String quicOverride) { this.quicOverride = quicOverride; return this; } /** * [Output Only] URL of the region where the regional TargetHttpsProxy resides. This field is not * applicable to global TargetHttpsProxies. * @return value or {@code null} for none */ public java.lang.String getRegion() { return region; } /** * [Output Only] URL of the region where the regional TargetHttpsProxy resides. This field is not * applicable to global TargetHttpsProxies. * @param region region or {@code null} for none */ public TargetHttpsProxy setRegion(java.lang.String region) { this.region = region; return this; } /** * [Output Only] Server-defined URL for the resource. * @return value or {@code null} for none */ public java.lang.String getSelfLink() { return selfLink; } /** * [Output Only] Server-defined URL for the resource. * @param selfLink selfLink or {@code null} for none */ public TargetHttpsProxy setSelfLink(java.lang.String selfLink) { this.selfLink = selfLink; return this; } /** * Optional. A URL referring to a networksecurity.ServerTlsPolicy resource that describes how the * proxy should authenticate inbound traffic. 
* * serverTlsPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules with * theloadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL orEXTERNAL_MANAGED or * INTERNAL_MANAGED. It also applies to a regional TargetHttpsProxy attached to regional * forwardingRules with theloadBalancingScheme set to EXTERNAL_MANAGED orINTERNAL_MANAGED. For * details whichServerTlsPolicy resources are accepted withINTERNAL_SELF_MANAGED and which with * EXTERNAL,INTERNAL_MANAGED, EXTERNAL_MANAGEDloadBalancingScheme consult ServerTlsPolicy * documentation. * * If left blank, communications are not encrypted. * @return value or {@code null} for none */ public java.lang.String getServerTlsPolicy() { return serverTlsPolicy; } /** * Optional. A URL referring to a networksecurity.ServerTlsPolicy resource that describes how the * proxy should authenticate inbound traffic. * * serverTlsPolicy only applies to a globalTargetHttpsProxy attached toglobalForwardingRules with * theloadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL orEXTERNAL_MANAGED or * INTERNAL_MANAGED. It also applies to a regional TargetHttpsProxy attached to regional * forwardingRules with theloadBalancingScheme set to EXTERNAL_MANAGED orINTERNAL_MANAGED. For * details whichServerTlsPolicy resources are accepted withINTERNAL_SELF_MANAGED and which with * EXTERNAL,INTERNAL_MANAGED, EXTERNAL_MANAGEDloadBalancingScheme consult ServerTlsPolicy * documentation. * * If left blank, communications are not encrypted. * @param serverTlsPolicy serverTlsPolicy or {@code null} for none */ public TargetHttpsProxy setServerTlsPolicy(java.lang.String serverTlsPolicy) { this.serverTlsPolicy = serverTlsPolicy; return this; } /** * URLs to SslCertificate resources that are used to authenticate connections between users and * the load balancer. At least one SSL certificate must be specified. SslCertificates do not apply * when the load balancing scheme is set to INTERNAL_SELF_MANAGED. 
* * The URLs should refer to a SSL Certificate resource or Certificate Manager Certificate * resource. Mixing Classic Certificates and Certificate Manager Certificates is not allowed. * Certificate Manager Certificates must include the certificatemanager API namespace. Using * Certificate Manager Certificates in this field is not supported by Global external Application * Load Balancer or Classic Application Load Balancer, use certificate_map instead. * * Currently, you may specify up to 15 Classic SSL Certificates or up to 100 Certificate Manager * Certificates. * * Certificate Manager Certificates accepted formats are: - //certificatemanager.googleapis * .com/projects/{project}/locations/{location}/certificates/{resourceName}. - https://certific * atemanager.googleapis.com/v1alpha1/projects/{project}/locations/{location}/certificates/{resour * ceName}. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getSslCertificates() { return sslCertificates; } /** * URLs to SslCertificate resources that are used to authenticate connections between users and * the load balancer. At least one SSL certificate must be specified. SslCertificates do not apply * when the load balancing scheme is set to INTERNAL_SELF_MANAGED. * * The URLs should refer to a SSL Certificate resource or Certificate Manager Certificate * resource. Mixing Classic Certificates and Certificate Manager Certificates is not allowed. * Certificate Manager Certificates must include the certificatemanager API namespace. Using * Certificate Manager Certificates in this field is not supported by Global external Application * Load Balancer or Classic Application Load Balancer, use certificate_map instead. * * Currently, you may specify up to 15 Classic SSL Certificates or up to 100 Certificate Manager * Certificates. 
* * Certificate Manager Certificates accepted formats are: - //certificatemanager.googleapis * .com/projects/{project}/locations/{location}/certificates/{resourceName}. - https://certific * atemanager.googleapis.com/v1alpha1/projects/{project}/locations/{location}/certificates/{resour * ceName}. * @param sslCertificates sslCertificates or {@code null} for none */ public TargetHttpsProxy setSslCertificates(java.util.List<java.lang.String> sslCertificates) { this.sslCertificates = sslCertificates; return this; } /** * URL of SslPolicy resource that will be associated with the TargetHttpsProxy resource. If not * set, the TargetHttpsProxy resource has no SSL policy configured. * @return value or {@code null} for none */ public java.lang.String getSslPolicy() { return sslPolicy; } /** * URL of SslPolicy resource that will be associated with the TargetHttpsProxy resource. If not * set, the TargetHttpsProxy resource has no SSL policy configured. * @param sslPolicy sslPolicy or {@code null} for none */ public TargetHttpsProxy setSslPolicy(java.lang.String sslPolicy) { this.sslPolicy = sslPolicy; return this; } /** * Specifies whether TLS 1.3 0-RTT Data ("Early Data") should be accepted for this service. Early * Data allows a TLS resumption handshake to include the initial application payload (a HTTP * request) alongside the handshake, reducing the effective round trips to "zero". This applies to * TLS 1.3 connections over TCP (HTTP/2) as well as over UDP (QUIC/h3). * * This can improve application performance, especially on networks where interruptions may be * common, such as on mobile. * * Requests with Early Data will have the "Early-Data" HTTP header set on the request, with a * value of "1", to allow the backend to determine whether Early Data was included. * * Note: TLS Early Data may allow requests to be replayed, as the data is sent to the backend * before the handshake has fully completed. 
Applications that allow idempotent HTTP methods to * make non-idempotent changes, such as a GET request updating a database, should not accept Early * Data on those requests, and reject requests with the "Early-Data: 1" HTTP header by returning a * HTTP 425 (Too Early) status code, in order to remain RFC compliant. * * The default value is DISABLED. * @return value or {@code null} for none */ public java.lang.String getTlsEarlyData() { return tlsEarlyData; } /** * Specifies whether TLS 1.3 0-RTT Data ("Early Data") should be accepted for this service. Early * Data allows a TLS resumption handshake to include the initial application payload (a HTTP * request) alongside the handshake, reducing the effective round trips to "zero". This applies to * TLS 1.3 connections over TCP (HTTP/2) as well as over UDP (QUIC/h3). * * This can improve application performance, especially on networks where interruptions may be * common, such as on mobile. * * Requests with Early Data will have the "Early-Data" HTTP header set on the request, with a * value of "1", to allow the backend to determine whether Early Data was included. * * Note: TLS Early Data may allow requests to be replayed, as the data is sent to the backend * before the handshake has fully completed. Applications that allow idempotent HTTP methods to * make non-idempotent changes, such as a GET request updating a database, should not accept Early * Data on those requests, and reject requests with the "Early-Data: 1" HTTP header by returning a * HTTP 425 (Too Early) status code, in order to remain RFC compliant. * * The default value is DISABLED. * @param tlsEarlyData tlsEarlyData or {@code null} for none */ public TargetHttpsProxy setTlsEarlyData(java.lang.String tlsEarlyData) { this.tlsEarlyData = tlsEarlyData; return this; } /** * A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from URL * to the BackendService. 
For example, the following are all valid URLs for specifying a URL map:
 * - https://www.googleapis.com/compute/v1/projects/project/global/urlMaps/url-map
 * - projects/project/global/urlMaps/url-map
 * - global/urlMaps/url-map
 * @return value or {@code null} for none
 */
public java.lang.String getUrlMap() {
  return urlMap;
}

/**
 * A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from URL
 * to the BackendService. For example, the following are all valid URLs for specifying a URL map:
 * - https://www.googleapis.com/compute/v1/projects/project/global/urlMaps/url-map
 * - projects/project/global/urlMaps/url-map
 * - global/urlMaps/url-map
 * @param urlMap urlMap or {@code null} for none
 */
public TargetHttpsProxy setUrlMap(java.lang.String urlMap) {
  this.urlMap = urlMap;
  return this;
}

@Override
public TargetHttpsProxy set(String fieldName, Object value) {
  return (TargetHttpsProxy) super.set(fieldName, value);
}

@Override
public TargetHttpsProxy clone() {
  return (TargetHttpsProxy) super.clone();
}

}
apache/cxf
35,096
systests/transports/src/test/java/org/apache/cxf/systest/https/conduit/HTTPSConduitTest.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cxf.systest.https.conduit;

import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.URI;
import java.net.URL;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509ExtendedTrustManager;
import javax.xml.namespace.QName;

import jakarta.xml.ws.BindingProvider;

import org.apache.cxf.Bus;
import org.apache.cxf.BusFactory;
import org.apache.cxf.bus.spring.BusApplicationContext;
import org.apache.cxf.bus.spring.SpringBusFactory;
import org.apache.cxf.common.classloader.ClassLoaderUtils;
import org.apache.cxf.configuration.jsse.TLSClientParameters;
import org.apache.cxf.configuration.security.AuthorizationPolicy;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.message.Message;
import org.apache.cxf.systest.https.BusServer;
import org.apache.cxf.testutil.common.AbstractBusClientServerTestBase;
import org.apache.cxf.transport.http.HTTPConduit;
import org.apache.cxf.transport.http.MessageTrustDecider;
import org.apache.cxf.transport.http.URLConnectionInfo;
import org.apache.cxf.transport.http.UntrustedURLConnectionIOException;
import org.apache.cxf.transport.http.auth.DefaultBasicAuthSupplier;
import org.apache.cxf.transport.http.auth.HttpAuthHeader;
import org.apache.cxf.transport.http.auth.HttpAuthSupplier;
import org.apache.cxf.transport.https.HttpsURLConnectionInfo;
import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;
import org.apache.hello_world.Greeter;
import org.apache.hello_world.services.SOAPService;
import org.springframework.context.ApplicationContext;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * This class tests several issues and Conduit policies based
 * on a set up of redirecting servers.
 * <pre>
 *
 * Http Redirection:
 *
 * Poltim(https:9005) ----&gt; Mortimer (http:9000)
 *
 * HttpS redirection/Trust:
 *
 * Tarpin(https:9003) ----&gt; Gordy(https:9001) ----&gt; Bethal(https:9002)
 *
 * Hostname Verifier Test
 *
 * Morpit (https:9008)
 *
 * </pre>
 * The Bethal server issues 401 with differing realms depending on the
 * User name given in the authorization header.
 * <p>
 * The Morpit has a CN that is not equal to "localhost" to kick in
 * the Hostname Verifier.
 */
public class HTTPSConduitTest extends AbstractBusClientServerTestBase {

    private static final boolean IN_PROCESS = true;

    private static TLSClientParameters tlsClientParameters = new TLSClientParameters();
    // Names of servers already launched; guarded by the instance lock in startServer().
    private static List<String> servers = new ArrayList<>();
    // Server name -> list of addresses that server listens on.
    private static Map<String, Collection<String>> addrMap = new TreeMap<>();

    static {
        try (InputStream key =
                 ClassLoaderUtils.getResourceAsStream("keys/Morpit.jks", HTTPSConduitTest.class);
             InputStream truststore =
                 ClassLoaderUtils.getResourceAsStream("keys/Truststore.jks", HTTPSConduitTest.class)) {
            //System.setProperty("javax.net.debug", "all");
            KeyManager[] kmgrs = getKeyManagers(getKeyStore("JKS", key, "password"), "password");
            TrustManager[] tmgrs = getTrustManagers(getKeyStore("JKS", truststore, "password"));
            tlsClientParameters.setKeyManagers(kmgrs);
            tlsClientParameters.setTrustManagers(tmgrs);
        } catch (Exception e) {
            throw new RuntimeException("Static initialization failed", e);
        }
    }

    private final QName serviceName = new QName("http://apache.org/hello_world", "SOAPService");
    private final QName bethalQ = new QName("http://apache.org/hello_world", "Bethal");
    private final QName gordyQ = new QName("http://apache.org/hello_world", "Gordy");
    private final QName tarpinQ = new QName("http://apache.org/hello_world", "Tarpin");
    private final QName poltimQ = new QName("http://apache.org/hello_world", "Poltim");

    public HTTPSConduitTest() {
    }

    public static String getPort(String s) {
        return BusServer.PORTMAP.get(s);
    }

    @BeforeClass
    public static void allocatePorts() {
        BusServer.resetPortMap();
        addrMap.clear();
        addrMap.put("Mortimer", List.of("http://localhost:" + getPort("PORT0") + "/"));
        addrMap.put("Tarpin", List.of("https://localhost:" + getPort("PORT1") + "/"));
        addrMap.put("Poltim", List.of("https://localhost:" + getPort("PORT2") + "/"));
        addrMap.put("Gordy", List.of("https://localhost:" + getPort("PORT3") + "/"));
        addrMap.put("Bethal", List.of("https://localhost:" + getPort("PORT4") + "/",
                                      "https://localhost:" + getPort("PORT6") + "/"));
        addrMap.put("Morpit", List.of("https://localhost:" + getPort("PORT5") + "/"));
        tlsClientParameters.setDisableCNCheck(true);
        servers.clear();
    }

    /**
     * This function is used to start up a server. It only "starts" a
     * server if it hasn't been started before, hence its static nature.
     * <p>
     * This approach is used to start the needed servers for a particular test
     * instead of starting them all in "startServers". This single needed
     * server approach alleviates the pain in starting them all just to run
     * a particular test in the debugger.
     */
    public synchronized boolean startServer(String name) {
        if (servers.contains(name)) {
            return true;
        }
        // Save the thread-default bus so the launched server doesn't inherit it.
        Bus bus = BusFactory.getThreadDefaultBus(false);
        URL serverC = Server.class.getResource(name + ".cxf");
        BusFactory.setDefaultBus(null);
        BusFactory.setThreadDefaultBus(null);
        boolean server = launchServer(Server.class, null,
            new String[] {
                name,
                addrMap.get(name).stream().collect(Collectors.joining(",")),
                serverC.toString()
            }, IN_PROCESS);
        if (server) {
            servers.add(name);
        }
        BusFactory.setDefaultBus(null);
        BusFactory.setThreadDefaultBus(bus);
        return server;
    }

    @AfterClass
    public static void cleanUp() {
        Bus b = BusFactory.getDefaultBus(false);
        if (b != null) {
            b.shutdown(true);
        }
        b = BusFactory.getThreadDefaultBus(false);
        if (b != null) {
            b.shutdown(true);
        }
    }

    /**
     * Loads a KeyStore of the given type from the stream.
     *
     * @param ksType keystore type, or null for the platform default
     * @param inputStream stream containing the keystore bytes
     * @param ksPassword keystore password, or null
     */
    public static KeyStore getKeyStore(String ksType, InputStream inputStream, String ksPassword)
        throws GeneralSecurityException, IOException {

        String type = ksType != null ? ksType : KeyStore.getDefaultType();
        char[] password = ksPassword != null ? ksPassword.toCharArray() : null;
        // We just use the default Keystore provider
        KeyStore keyStore = KeyStore.getInstance(type);
        keyStore.load(inputStream, password);
        return keyStore;
    }

    public static KeyManager[] getKeyManagers(KeyStore keyStore, String keyPassword)
        throws GeneralSecurityException, IOException {
        // For tests, we just use the default algorithm
        String alg = KeyManagerFactory.getDefaultAlgorithm();
        char[] keyPass = keyPassword != null ? keyPassword.toCharArray() : null;
        // For tests, we just use the default provider.
        KeyManagerFactory fac = KeyManagerFactory.getInstance(alg);
        fac.init(keyStore, keyPass);
        return fac.getKeyManagers();
    }

    public static TrustManager[] getTrustManagers(KeyStore keyStore)
        throws GeneralSecurityException, IOException {
        // For tests, we just use the default algorithm
        String alg = TrustManagerFactory.getDefaultAlgorithm();
        // For tests, we just use the default provider.
        TrustManagerFactory fac = TrustManagerFactory.getInstance(alg);
        fac.init(keyStore);
        return fac.getTrustManagers();
    }

    //methods that a subclass can override to inject a Proxy into the flow
    //and assert the proxy was appropriately called
    protected void configureProxy(Client c) {
    }
    protected void resetProxyCount() {
    }
    protected void assertProxyRequestCount(int i) {
    }

    /**
     * We use this class to reset the default bus.
     * Note: This may not always work in the future.
     * I was lucky in that "defaultBus" is actually a
     * protected static.
     */
    class DefaultBusFactory extends SpringBusFactory {
        public Bus createBus(URL config) {
            Bus bus = super.createBus(config, true);
            BusFactory.setDefaultBus(bus);
            BusFactory.setThreadDefaultBus(bus);
            return bus;
        }
    }

    /**
     * This method tests a basic https connection to Bethal.
     * It supplies an authorization policy with preemptive user/pass
     * to avoid the 401.
     */
    @Test
    public void testHttpsBasicConnectionWithConfig() throws Exception {
        startServer("Bethal");

        URL config = getClass().getResource("BethalClientConfig.cxf");
        // We go through the back door, setting the default bus.
        new DefaultBusFactory().createBus(config);
        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter bethal = service.getPort(bethalQ, Greeter.class);
        assertNotNull("Port is null", bethal);
        updateAddressPort(bethal, getPort("PORT4"));
        verifyBethalClient(bethal);
    }

    @Test
    public void testGetClientFromSpringContext() throws Exception {
        startServer("Bethal");

        BusFactory.setDefaultBus(null);
        // The client bean configuration file
        URL beans = getClass().getResource("BethalClientBeans.xml");
        // We go through the back door, setting the default bus.
        Bus bus = new DefaultBusFactory().createBus(beans);

        ApplicationContext context = bus.getExtension(BusApplicationContext.class);
        Greeter bethal = (Greeter)context.getBean("Bethal");
        updateAddressPort(bethal, getPort("PORT4"));
        // verify the client side's setting
        verifyBethalClient(bethal);
    }

    // we just verify the configurations are loaded successfully
    private void verifyBethalClient(Greeter bethal) {
        Client client = ClientProxy.getClient(bethal);

        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = http.getClient();
        assertTrue("the httpClientPolicy's autoRedirect should be true",
                   httpClientPolicy.isAutoRedirect());
        TLSClientParameters tlsParameters = http.getTlsClientParameters();
        assertNotNull("the http conduit's tlsParameters should not be null", tlsParameters);

        // If we set any name, but Edward, Mary, or George,
        // and a password of "password" we will get through
        // Bethal.
        AuthorizationPolicy authPolicy = http.getAuthorization();
        assertEquals("Set the wrong user name from the configuration",
                     "Betty", authPolicy.getUserName());
        assertEquals("Set the wrong password from the configuration",
                     "password", authPolicy.getPassword());
        configureProxy(ClientProxy.getClient(bethal));
        // Several calls in a row to exercise connection reuse on the conduit.
        String answer = bethal.sayHi();
        answer = bethal.sayHi();
        answer = bethal.sayHi();
        answer = bethal.sayHi();
        answer = bethal.sayHi();
        assertTrue("Unexpected answer: " + answer,
                   "Bonjour from Bethal".equals(answer));

        //With HTTPS, it will just be a CONNECT to the proxy and all the
        //data is encrypted. Thus, the proxy cannot distinguish the requests
        assertProxyRequestCount(0);
    }

    /**
     * This method tests a basic https connection to Bethal.
     * It supplies an authorization policy with preemptive user/pass
     * to avoid the 401.
     */
    @Test
    public void testHttpsBasicConnection() throws Exception {
        // Use common/shared TLSClientParameters
        testHttpsBasicConnection(tlsClientParameters);
    }

    @Test
    public void testHttpsBasicConnectionCustomSslContext() throws Exception {
        // Use custom SSLContext registered in TLSClientParameters
        SSLContext ctx = SSLContext.getInstance("TLSv1.3");
        try (InputStream keyStoreIs = ClassLoaderUtils.getResourceAsStream(
                "keys/Morpit.jks", HTTPSConduitTest.class)) {
            KeyManager[] keyManagers = getKeyManagers(
                getKeyStore("JKS", keyStoreIs, "password"), "password");

            // I need to disable CN verification (as certificate contains Bethal as CN),
            // but I cannot use TLSClientParameters.setDisableCNCheck(), because in this case
            // URLCONNECTION is always used (see HttpClientHTTPConduit.setupConnection())
            // -> I must use my own TrustManager without verification
            TrustManager trustManager = new X509ExtendedTrustManager() {
                @Override
                public X509Certificate[] getAcceptedIssuers() {
                    return new X509Certificate[] {};
                }
                @Override
                public void checkClientTrusted(X509Certificate[] chain, String authType) {
                }
                @Override
                public void checkServerTrusted(X509Certificate[] chain, String authType) {
                }
                @Override
                public void checkClientTrusted(X509Certificate[] chain, String authType, Socket socket) {
                }
                @Override
                public void checkServerTrusted(X509Certificate[] chain, String authType, Socket socket) {
                }
                @Override
                public void checkClientTrusted(X509Certificate[] chain, String authType, SSLEngine engine) {
                }
                @Override
                public void checkServerTrusted(X509Certificate[] chain, String authType, SSLEngine engine) {
                }
            };

            ctx.init(keyManagers, new TrustManager[] {trustManager},
                     SecureRandom.getInstance("SHA1PRNG"));
        }

        // HostnameVerifier (disable host name verification)
        class AllowAllHostnameVerifier implements HostnameVerifier {
            @Override
            public boolean verify(String host, SSLSession session) {
                try {
                    Certificate[] certs = session.getPeerCertificates();
                    return certs != null && certs[0] instanceof X509Certificate;
                } catch (SSLPeerUnverifiedException e) {
                    return false;
                }
            }
        }

        // TLSClientParameters (Custom SSLContext)
        TLSClientParameters tlsClientParams = new TLSClientParameters();
        tlsClientParams.setSslContext(ctx);
        // TLSClientParameters (disable host name verification - now needed only when URLConnection is used)
        tlsClientParams.setHostnameVerifier(new AllowAllHostnameVerifier());

        testHttpsBasicConnection(tlsClientParams);
    }

    private void testHttpsBasicConnection(TLSClientParameters tlsClientParams) throws Exception {
        startServer("Bethal");
        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter bethal = service.getPort(bethalQ, Greeter.class);
        assertNotNull("Port is null", bethal);
        updateAddressPort(bethal, getPort("PORT4"));

        // Okay, I'm sick of configuration files.
        // This also tests dynamic configuration of the conduit.
        Client client = ClientProxy.getClient(bethal);
        client.getRequestContext().put("share.httpclient.http.conduit", false);
        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setAutoRedirect(false);
        // If we set any name, but Edward, Mary, or George,
        // and a password of "password" we will get through
        // Bethal.
        AuthorizationPolicy authPolicy = new AuthorizationPolicy();
        authPolicy.setUserName("Betty");
        authPolicy.setPassword("password");

        http.setClient(httpClientPolicy);
        http.setTlsClientParameters(tlsClientParams);
        http.setAuthorization(authPolicy);

        configureProxy(client);
        String answer = bethal.sayHi();
        assertTrue("Unexpected answer: " + answer,
                   "Bonjour from Bethal".equals(answer));
        assertProxyRequestCount(0);
    }

    @Test
    public void testHttpsRedirectToHttpFail() throws Exception {
        startServer("Mortimer");
        startServer("Poltim");
        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter poltim = service.getPort(poltimQ, Greeter.class);
        assertNotNull("Port is null", poltim);
        updateAddressPort(poltim, getPort("PORT2"));

        // Okay, I'm sick of configuration files.
        // This also tests dynamic configuration of the conduit.
        Client client = ClientProxy.getClient(poltim);
        client.getRequestContext().put("share.httpclient.http.conduit", false);
        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setAutoRedirect(true);
        http.setClient(httpClientPolicy);
        http.setTlsClientParameters(tlsClientParameters);

        configureProxy(client);
        poltim.sayHi();
        //client -> poltim is https and thus not recorded but then redirected to mortimer
        //client -> mortimer is http and recorded
        assertProxyRequestCount(1);
    }

    /**
     * Trust decider that only trusts peers whose principal contains
     * one of the configured "OU=" names; counts how often it was consulted.
     */
    class MyHttpsTrustDecider extends MessageTrustDecider {
        private String[] trustName;
        private int called;

        MyHttpsTrustDecider(String name) {
            trustName = new String[] {name};
        }

        MyHttpsTrustDecider(String[] name) {
            trustName = name;
        }

        public int wasCalled() {
            return called;
        }

        public void establishTrust(
            String conduitName,
            URLConnectionInfo cinfo,
            Message message
        ) throws UntrustedURLConnectionIOException {

            called++;
            HttpsURLConnectionInfo ci = (HttpsURLConnectionInfo) cinfo;
            boolean trusted = false;
            for (int i = 0; i < trustName.length; i++) {
                trusted = trusted
                    || ci.getPeerPrincipal().toString().contains("OU=" + trustName[i]);
            }
            if (!trusted) {
                throw new UntrustedURLConnectionIOException(
                        "Peer Principal \"" + ci.getPeerPrincipal()
                        + "\" does not contain " + getTrustNames());
            }
        }

        private String getTrustNames() {
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < trustName.length; i++) {
                sb.append("\"OU=");
                sb.append(trustName[i]);
                sb.append('"');
                if (i < trustName.length - 1) {
                    sb.append(", ");
                }
            }
            return sb.toString();
        }
    }

    @Test
    public void testHttpsTrust() throws Exception {
        startServer("Bethal");

        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter bethal = service.getPort(bethalQ, Greeter.class);
        assertNotNull("Port is null", bethal);
        updateAddressPort(bethal, getPort("PORT4"));

        // Okay, I'm sick of configuration files.
        // This also tests dynamic configuration of the conduit.
        Client client = ClientProxy.getClient(bethal);
        client.getRequestContext().put("share.httpclient.http.conduit", false);
        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setAutoRedirect(false);
        // If we set any name, but Edward, Mary, or George,
        // and a password of "password" we will get through
        // Bethal.
        AuthorizationPolicy authPolicy = new AuthorizationPolicy();
        authPolicy.setUserName("Betty");
        authPolicy.setPassword("password");

        http.setClient(httpClientPolicy);
        http.setTlsClientParameters(tlsClientParameters);
        http.setAuthorization(authPolicy);

        // Our expected server should be OU=Bethal
        http.setTrustDecider(new MyHttpsTrustDecider("Bethal"));

        configureProxy(client);
        String answer = bethal.sayHi();
        assertTrue("Unexpected answer: " + answer,
                   "Bonjour from Bethal".equals(answer));
        assertProxyRequestCount(0);

        // Nobody will not equal OU=Bethal
        MyHttpsTrustDecider trustDecider = new MyHttpsTrustDecider("Nobody");
        http.setTrustDecider(trustDecider);

        try {
            answer = bethal.sayHi();
            fail("Unexpected answer from Bethal: " + answer);
        } catch (Exception e) {
            //e.printStackTrace();
            //assertTrue("Trust Decider was not called",
            //           0 > trustDecider.wasCalled());
        }
        assertProxyRequestCount(0);
    }

    @Test
    public void testHttpsTrustRedirect() throws Exception {
        startServer("Tarpin");
        startServer("Gordy");
        startServer("Bethal");

        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter tarpin = service.getPort(tarpinQ, Greeter.class);
        assertNotNull("Port is null", tarpin);
        updateAddressPort(tarpin, getPort("PORT1"));

        // Okay, I'm sick of configuration files.
        // This also tests dynamic configuration of the conduit.
        Client client = ClientProxy.getClient(tarpin);
        client.getRequestContext().put("share.httpclient.http.conduit", false);
        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setAutoRedirect(true);
        // If we set any name, but Edward, Mary, or George,
        // and a password of "password" we will get through
        // Bethal.
        AuthorizationPolicy authPolicy = new AuthorizationPolicy();
        authPolicy.setUserName("Betty");
        authPolicy.setPassword("password");

        http.setClient(httpClientPolicy);
        http.setTlsClientParameters(tlsClientParameters);
        http.setAuthorization(authPolicy);

        // We get redirected from Tarpin, to Gordy, to Bethal.
        MyHttpsTrustDecider trustDecider =
            new MyHttpsTrustDecider(new String[] {"Tarpin", "Gordy", "Bethal"});
        http.setTrustDecider(trustDecider);

        // We actually get our answer from Bethal at the end of the
        // redirects.
        configureProxy(ClientProxy.getClient(tarpin));
        String answer = tarpin.sayHi();

        assertProxyRequestCount(0);
        assertTrue("Trust Decider wasn't called correctly",
                   3 == trustDecider.wasCalled());
        assertTrue("Unexpected answer: " + answer,
                   "Bonjour from Bethal".equals(answer));

        // Limit the redirects to 1, since there are two, this should fail.
        http.getClient().setMaxRetransmits(1);

        try {
            String a2 = tarpin.sayHi();
            fail("Unexpected answer from Tarpin: " + a2);
        } catch (Exception e) {
            //e.printStackTrace();
        }
        assertProxyRequestCount(0);

        // Set back to unlimited.
        http.getClient().setMaxRetransmits(-1);

        // Effectively we will not trust Gordy in the middle.
        trustDecider = new MyHttpsTrustDecider(new String[] {"Tarpin", "Bethal"});
        http.setTrustDecider(trustDecider);

        try {
            String a2 = tarpin.sayHi();
            fail("Unexpected answer from Tarpin: " + a2);
        } catch (Exception e) {
            //e.printStackTrace();
            assertTrue("Trust Decider wasn't called correctly",
                       2 == trustDecider.wasCalled());
        }
        assertProxyRequestCount(0);
    }

    public class MyBasicAuthSupplier implements HttpAuthSupplier {

        String realm;
        String user;
        String pass;

        /**
         * This will loop from Cronus, to Andromeda, to Zorantius
         */
        MyBasicAuthSupplier() {
        }

        MyBasicAuthSupplier(String r, String u, String p) {
            realm = r;
            user = u;
            pass = p;
        }

        /**
         * If we don't have the realm set, then we loop
         * through the realms.
         */
        public String getAuthorization(
                AuthorizationPolicy authPolicy,
                URI currentURI,
                Message message,
                String fullHeader) {
            String requestedRealm = new HttpAuthHeader(fullHeader).getRealm();
            if (realm != null && realm.equals(requestedRealm)) {
                return createUserPass(user, pass);
            }
            if ("Andromeda".equals(requestedRealm)) {
                // This will get us another 401 to Zorantius
                return createUserPass("Edward", "password");
            }
            if ("Zorantius".equals(requestedRealm)) {
                // George will get us another 401 to Cronus
                return createUserPass("George", "password");
            }
            if ("Cronus".equals(requestedRealm)) {
                // Mary will get us another 401 to Andromeda
                return createUserPass("Mary", "password");
            }
            return null;
        }

        private String createUserPass(String usr, String pwd) {
            return DefaultBasicAuthSupplier.getBasicAuthHeader(usr, pwd);
        }

        public boolean requiresRequestCaching() {
            return false;
        }
    }

    /**
     * This tests redirects through Gordy to Bethal. Bethal will
     * supply a series of 401s. See PushBack401.
     */
    @Test
    public void testHttpsRedirect401Response() throws Exception {
        startServer("Gordy");
        startServer("Bethal");

        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter gordy = service.getPort(gordyQ, Greeter.class);
        assertNotNull("Port is null", gordy);
        updateAddressPort(gordy, getPort("PORT3"));

        // Okay, I'm sick of configuration files.
        // This also tests dynamic configuration of the conduit.
        Client client = ClientProxy.getClient(gordy);
        client.getRequestContext().put("share.httpclient.http.conduit", false);
        HTTPConduit http = (HTTPConduit) client.getConduit();

        HTTPClientPolicy httpClientPolicy = new HTTPClientPolicy();
        httpClientPolicy.setAutoRedirect(true);
        http.setClient(httpClientPolicy);
        http.setTlsClientParameters(tlsClientParameters);

        // We get redirected from Gordy, to Bethal.
        http.setTrustDecider(
            new MyHttpsTrustDecider(new String[] {"Gordy", "Bethal"}));

        // Without preemptive user/pass Bethal returns a
        // 401 for realm Cronus. If we supply any name other
        // than Edward, George, or Mary, with the pass of "password"
        // we should succeed.
        http.setAuthSupplier(
            new MyBasicAuthSupplier("Cronus", "Betty", "password"));

        // We actually get our answer from Bethal at the end of the
        // redirects.
        String answer = gordy.sayHi();
        assertTrue("Unexpected answer: " + answer,
                   "Bonjour from Bethal".equals(answer));

        // The loop auth supplier.
        // We should die with looping realms.
        http.setAuthSupplier(new MyBasicAuthSupplier());

        try {
            answer = gordy.sayHi();
            fail("Unexpected answer from Gordy: " + answer);
        } catch (Exception e) {
            //e.printStackTrace();
        }
    }

    @Test
    public void testUpdateAddress() throws Exception {
        startServer("Bethal");

        URL config = getClass().getResource("BethalClientConfig.cxf");
        // We go through the back door, setting the default bus.
        new DefaultBusFactory().createBus(config);
        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter bethal = service.getPort(bethalQ, Greeter.class);
        updateAddressPort(bethal, getPort("PORT4"));
        verifyBethalClient(bethal);

        updateAddressPort(bethal, getPort("PORT6"));
        verifyBethalClient(bethal);

        // setup the feature by using JAXWS front-end API
        final Collection<Future<String>> futures = new ArrayList<>();
        final ExecutorService executor = Executors.newFixedThreadPool(10);
        final Random random = new Random();
        try {
            for (int i = 0; i < 30; ++i) {
                futures.add(executor.submit(() -> {
                    if (random.nextBoolean()) {
                        updateAddressPort(bethal, getPort("PORT4"));
                    } else {
                        updateAddressPort(bethal, getPort("PORT6"));
                    }
                    return bethal.greetMe("timeout!");
                }));
            }

            for (final Future<String> f : futures) {
                assertThat(f.get(10, TimeUnit.SECONDS), equalTo("Hello timeout!"));
            }
        } finally {
            executor.shutdown();
            if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
                executor.shutdownNow();
            }
        }
    }

    @Test
    public void testUpdateAddressNoClientReset() throws Exception {
        startServer("Bethal");

        URL config = getClass().getResource("BethalClientConfig.cxf");
        // We go through the back door, setting the default bus.
        new DefaultBusFactory().createBus(config);
        URL wsdl = getClass().getResource("greeting.wsdl");
        assertNotNull("WSDL is null", wsdl);

        SOAPService service = new SOAPService(wsdl, serviceName);
        assertNotNull("Service is null", service);

        Greeter bethal = service.getPort(bethalQ, Greeter.class);
        ((BindingProvider)bethal).getRequestContext()
            .put("https.reset.httpclient.http.conduit", false);

        updateAddressPort(bethal, getPort("PORT4"));
        verifyBethalClient(bethal);

        updateAddressPort(bethal, getPort("PORT6"));
        verifyBethalClient(bethal);

        // setup the feature by using JAXWS front-end API
        final Collection<Future<String>> futures = new ArrayList<>();
        final ExecutorService executor = Executors.newFixedThreadPool(10);
        final Random random = new Random();
        try {
            for (int i = 0; i < 30; ++i) {
                futures.add(executor.submit(() -> {
                    if (random.nextBoolean()) {
                        updateAddressPort(bethal, getPort("PORT4"));
                    } else {
                        updateAddressPort(bethal, getPort("PORT6"));
                    }
                    return bethal.greetMe("timeout!");
                }));
            }

            for (final Future<String> f : futures) {
                assertThat(f.get(10, TimeUnit.SECONDS), equalTo("Hello timeout!"));
            }
        } finally {
            executor.shutdown();
            if (!executor.awaitTermination(30, TimeUnit.SECONDS)) {
                executor.shutdownNow();
            }
        }
    }
}
googleapis/google-cloud-java
35,318
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/UpdateReasoningEngineRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/reasoning_engine_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Request message for * [ReasoningEngineService.UpdateReasoningEngine][google.cloud.aiplatform.v1beta1.ReasoningEngineService.UpdateReasoningEngine]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest} */ public final class UpdateReasoningEngineRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) UpdateReasoningEngineRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateReasoningEngineRequest.newBuilder() to construct. 
private UpdateReasoningEngineRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateReasoningEngineRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateReasoningEngineRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateReasoningEngineRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateReasoningEngineRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.class, com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.Builder.class); } private int bitField0_; public static final int REASONING_ENGINE_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.ReasoningEngine reasoningEngine_; /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the reasoningEngine field is set. */ @java.lang.Override public boolean hasReasoningEngine() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The reasoningEngine. 
*/ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ReasoningEngine getReasoningEngine() { return reasoningEngine_ == null ? com.google.cloud.aiplatform.v1beta1.ReasoningEngine.getDefaultInstance() : reasoningEngine_; } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.ReasoningEngineOrBuilder getReasoningEngineOrBuilder() { return reasoningEngine_ == null ? com.google.cloud.aiplatform.v1beta1.ReasoningEngine.getDefaultInstance() : reasoningEngine_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getReasoningEngine()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getReasoningEngine()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest other = (com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) obj; if (hasReasoningEngine() != other.hasReasoningEngine()) return false; if (hasReasoningEngine()) { if (!getReasoningEngine().equals(other.getReasoningEngine())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } 
int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasReasoningEngine()) { hash = (37 * hash) + REASONING_ENGINE_FIELD_NUMBER; hash = (53 * hash) + getReasoningEngine().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( 
java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this 
== DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [ReasoningEngineService.UpdateReasoningEngine][google.cloud.aiplatform.v1beta1.ReasoningEngineService.UpdateReasoningEngine]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateReasoningEngineRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateReasoningEngineRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.class, com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getReasoningEngineFieldBuilder(); getUpdateMaskFieldBuilder(); } } 
@java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; reasoningEngine_ = null; if (reasoningEngineBuilder_ != null) { reasoningEngineBuilder_.dispose(); reasoningEngineBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto .internal_static_google_cloud_aiplatform_v1beta1_UpdateReasoningEngineRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest build() { com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest buildPartial() { com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest result = new com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.reasoningEngine_ = reasoningEngineBuilder_ == null ? reasoningEngine_ : reasoningEngineBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest other) { if (other == com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest.getDefaultInstance()) return this; if (other.hasReasoningEngine()) { mergeReasoningEngine(other.getReasoningEngine()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getReasoningEngineFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1beta1.ReasoningEngine reasoningEngine_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ReasoningEngine, com.google.cloud.aiplatform.v1beta1.ReasoningEngine.Builder, com.google.cloud.aiplatform.v1beta1.ReasoningEngineOrBuilder> reasoningEngineBuilder_; /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the reasoningEngine field is set. */ public boolean hasReasoningEngine() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The reasoningEngine. */ public com.google.cloud.aiplatform.v1beta1.ReasoningEngine getReasoningEngine() { if (reasoningEngineBuilder_ == null) { return reasoningEngine_ == null ? 
com.google.cloud.aiplatform.v1beta1.ReasoningEngine.getDefaultInstance() : reasoningEngine_; } else { return reasoningEngineBuilder_.getMessage(); } } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setReasoningEngine(com.google.cloud.aiplatform.v1beta1.ReasoningEngine value) { if (reasoningEngineBuilder_ == null) { if (value == null) { throw new NullPointerException(); } reasoningEngine_ = value; } else { reasoningEngineBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setReasoningEngine( com.google.cloud.aiplatform.v1beta1.ReasoningEngine.Builder builderForValue) { if (reasoningEngineBuilder_ == null) { reasoningEngine_ = builderForValue.build(); } else { reasoningEngineBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeReasoningEngine(com.google.cloud.aiplatform.v1beta1.ReasoningEngine value) { if (reasoningEngineBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && reasoningEngine_ != null && reasoningEngine_ != com.google.cloud.aiplatform.v1beta1.ReasoningEngine.getDefaultInstance()) { getReasoningEngineBuilder().mergeFrom(value); } else { reasoningEngine_ = value; } } else { reasoningEngineBuilder_.mergeFrom(value); } if (reasoningEngine_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearReasoningEngine() { bitField0_ = (bitField0_ & ~0x00000001); reasoningEngine_ = null; if (reasoningEngineBuilder_ != null) { reasoningEngineBuilder_.dispose(); reasoningEngineBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.ReasoningEngine.Builder getReasoningEngineBuilder() { bitField0_ |= 0x00000001; onChanged(); return getReasoningEngineFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1beta1.ReasoningEngineOrBuilder getReasoningEngineOrBuilder() { if (reasoningEngineBuilder_ != null) { return reasoningEngineBuilder_.getMessageOrBuilder(); } else { return reasoningEngine_ == null ? com.google.cloud.aiplatform.v1beta1.ReasoningEngine.getDefaultInstance() : reasoningEngine_; } } /** * * * <pre> * Required. The ReasoningEngine which replaces the resource on the server. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ReasoningEngine reasoning_engine = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ReasoningEngine, com.google.cloud.aiplatform.v1beta1.ReasoningEngine.Builder, com.google.cloud.aiplatform.v1beta1.ReasoningEngineOrBuilder> getReasoningEngineFieldBuilder() { if (reasoningEngineBuilder_ == null) { reasoningEngineBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.ReasoningEngine, com.google.cloud.aiplatform.v1beta1.ReasoningEngine.Builder, com.google.cloud.aiplatform.v1beta1.ReasoningEngineOrBuilder>( getReasoningEngine(), getParentForChildren(), isClean()); reasoningEngine_ = null; } return reasoningEngineBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Mask specifying which fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. 
Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Mask specifying which fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Mask specifying which fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest) private static final com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest(); } public static com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateReasoningEngineRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateReasoningEngineRequest>() { @java.lang.Override public UpdateReasoningEngineRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateReasoningEngineRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateReasoningEngineRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.UpdateReasoningEngineRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/maven-checkstyle-plugin
35,196
src/main/java/org/apache/maven/plugins/checkstyle/CheckstyleViolationCheckMojo.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.maven.plugins.checkstyle;

import javax.inject.Inject;
import javax.inject.Named;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import com.puppycrawl.tools.checkstyle.DefaultLogger;
import com.puppycrawl.tools.checkstyle.SarifLogger;
import com.puppycrawl.tools.checkstyle.XMLLogger;
import com.puppycrawl.tools.checkstyle.api.AuditListener;
import com.puppycrawl.tools.checkstyle.api.AutomaticBean.OutputStreamOptions;
import com.puppycrawl.tools.checkstyle.api.CheckstyleException;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginManagement;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.plugins.checkstyle.exec.CheckstyleExecutor;
import org.apache.maven.plugins.checkstyle.exec.CheckstyleExecutorException;
import org.apache.maven.plugins.checkstyle.exec.CheckstyleExecutorRequest;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.configuration.PlexusConfiguration;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.PathTool;
import org.codehaus.plexus.util.ReaderFactory;
import org.codehaus.plexus.util.xml.pull.MXParser;
import org.codehaus.plexus.util.xml.pull.XmlPullParser;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;

/**
 * Performs Checkstyle analysis and outputs violations or a count of violations
 * to the console, potentially failing the build.
 * It can also be configured to re-use an earlier analysis.
 *
 * @author <a href="mailto:joakim@erdfelt.net">Joakim Erdfelt</a>
 */
@Mojo(
        name = "check",
        defaultPhase = LifecyclePhase.VERIFY,
        requiresDependencyResolution = ResolutionScope.NONE,
        threadSafe = true)
public class CheckstyleViolationCheckMojo extends AbstractMojo {

    private static final String JAVA_FILES = "**\\/*.java";

    private static final String DEFAULT_CONFIG_LOCATION = "sun_checks.xml";

    /**
     * Specifies the path and filename to save the Checkstyle output. The format
     * of the output file is determined by the <code>outputFileFormat</code>
     * parameter.
     */
    @Parameter(property = "checkstyle.output.file", defaultValue = "${project.build.directory}/checkstyle-result.xml")
    private File outputFile;

    /**
     * Specifies the format of the output to be used when writing to the output
     * file. Valid values are "<code>plain</code>", "<code>sarif</code>" and "<code>xml</code>".
     */
    @Parameter(property = "checkstyle.output.format", defaultValue = "xml")
    private String outputFileFormat;

    /**
     * Fail the build on a violation. The goal checks for the violations
     * after logging them (if {@link #logViolationsToConsole} is {@code true}).
     * Compare this to {@link #failsOnError} which fails the build immediately
     * before examining the output log.
     */
    @Parameter(property = "checkstyle.failOnViolation", defaultValue = "true")
    private boolean failOnViolation;

    /**
     * The maximum number of allowed violations. The execution fails only if the
     * number of violations is above this limit.
     *
     * @since 2.3
     */
    @Parameter(property = "checkstyle.maxAllowedViolations", defaultValue = "0")
    private int maxAllowedViolations;

    /**
     * The lowest severity level that is considered a violation.
     * Valid values are "<code>error</code>", "<code>warning</code>" and "<code>info</code>".
     *
     * @since 2.2
     */
    @Parameter(property = "checkstyle.violationSeverity", defaultValue = "error")
    private String violationSeverity = "error";

    /**
     * Violations to ignore. This is a comma-separated list, each value being either
     * a rule name, a rule category or a java package name of rule class.
     *
     * @since 2.13
     */
    @Parameter(property = "checkstyle.violation.ignore")
    private String violationIgnore;

    /**
     * Skip entire check.
     *
     * @since 2.2
     */
    @Parameter(property = "checkstyle.skip", defaultValue = "false")
    private boolean skip;

    /**
     * Skip Checkstyle execution will only scan the outputFile.
     *
     * @since 2.5
     */
    @Parameter(property = "checkstyle.skipExec", defaultValue = "false")
    private boolean skipExec;

    /**
     * Output the detected violations to the console.
     *
     * @since 2.3
     */
    @Parameter(property = "checkstyle.console", defaultValue = "true")
    private boolean logViolationsToConsole;

    /**
     * Output the detected violation count to the console.
     *
     * @since 3.0.1
     */
    @Parameter(property = "checkstyle.logViolationCount", defaultValue = "true")
    private boolean logViolationCountToConsole;

    /**
     * Specifies the location of the resources to be used for Checkstyle.
     *
     * @since 2.11
     */
    @Parameter(defaultValue = "${project.resources}", readonly = true)
    protected List<Resource> resources;

    /**
     * Specifies the location of the test resources to be used for Checkstyle.
     *
     * @since 2.16
     */
    @Parameter(defaultValue = "${project.testResources}", readonly = true)
    protected List<Resource> testResources;

    /**
     * <p>
     * Specifies the location of the XML configuration to use.
     * <p>
     * Potential values are a filesystem path, a URL, or a classpath resource.
     * This parameter expects that the contents of the location conform to the
     * xml format (Checkstyle <a
     * href="https://checkstyle.org/config.html#Modules">Checker
     * module</a>) configuration of rulesets.
     * <p>
     * This parameter is resolved as resource, URL, then file. If successfully
     * resolved, the contents of the configuration is copied into the
     * <code>${project.build.directory}/checkstyle-configuration.xml</code>
     * file before being passed to Checkstyle as a configuration.
     * <p>
     * There are 2 predefined rulesets.
     * <ul>
     * <li><code>sun_checks.xml</code>: Sun Checks.</li>
     * <li><code>google_checks.xml</code>: Google Checks.</li>
     * </ul>
     *
     * @since 2.5
     */
    @Parameter(property = "checkstyle.config.location", defaultValue = DEFAULT_CONFIG_LOCATION)
    private String configLocation;

    /**
     * <p>
     * Specifies the location of the properties file.
     * <p>
     * This parameter is resolved as URL, File then resource. If successfully
     * resolved, the contents of the properties location is copied into the
     * <code>${project.build.directory}/checkstyle-checker.properties</code>
     * file before being passed to Checkstyle for loading.
     * <p>
     * The contents of the <code>propertiesLocation</code> will be made
     * available to Checkstyle for specifying values for parameters within the
     * xml configuration (specified in the <code>configLocation</code>
     * parameter).
     *
     * @since 2.5
     */
    @Parameter(property = "checkstyle.properties.location")
    private String propertiesLocation;

    /**
     * Allows for specifying raw property expansion information.
     */
    @Parameter
    private String propertyExpansion;

    /**
     * <p>
     * Specifies the location of the License file (a.k.a. the header file) that
     * can be used by Checkstyle to verify that source code has the correct
     * license header.
     * <p>
     * You need to use <code>${checkstyle.header.file}</code> in your Checkstyle xml
     * configuration to reference the name of this header file.
     * <p>
     * For instance:
     * <pre>
     * &lt;module name="RegexpHeader"&gt;
     *   &lt;property name="headerFile" value="${checkstyle.header.file}"/&gt;
     * &lt;/module&gt;
     * </pre>
     *
     * @since 2.0-beta-2
     */
    @Parameter(property = "checkstyle.header.file", defaultValue = "LICENSE.txt")
    private String headerLocation;

    /**
     * Specifies the cache file used to speed up Checkstyle on successive runs.
     */
    @Parameter(defaultValue = "${project.build.directory}/checkstyle-cachefile")
    private String cacheFile;

    /**
     * The key to be used in the properties for the suppressions file.
     *
     * @since 2.1
     */
    @Parameter(property = "checkstyle.suppression.expression", defaultValue = "checkstyle.suppressions.file")
    private String suppressionsFileExpression;

    /**
     * <p>
     * Specifies the location of the suppressions XML file to use.
     * <p>
     * This parameter is resolved as resource, URL, then file. If successfully
     * resolved, the contents of the suppressions XML is copied into the
     * <code>${project.build.directory}/checkstyle-suppressions.xml</code> file
     * before being passed to Checkstyle for loading.
     * <p>
     * See <code>suppressionsFileExpression</code> for the property that will
     * be made available to your Checkstyle configuration.
     *
     * @since 2.0-beta-2
     */
    @Parameter(property = "checkstyle.suppressions.location")
    private String suppressionsLocation;

    /**
     * The file encoding to use when reading the source files. If the property <code>project.build.sourceEncoding</code>
     * is not set, the platform default encoding is used. <strong>Note:</strong> This parameter always overrides the
     * property <code>charset</code> from Checkstyle's <code>TreeWalker</code> module.
     *
     * @since 2.2
     */
    @Parameter(property = "encoding", defaultValue = "${project.build.sourceEncoding}")
    private String inputEncoding;

    /**
     * Output errors to console.
     */
    @Parameter(property = "checkstyle.consoleOutput", defaultValue = "false")
    private boolean consoleOutput;

    /**
     * The Maven Project Object.
     */
    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    protected MavenProject project;

    /**
     * The Plugin Descriptor
     */
    @Parameter(defaultValue = "${plugin}", readonly = true, required = true)
    private PluginDescriptor plugin;

    /**
     * If <code>null</code>, the Checkstyle plugin will display violations on stdout.
     * Otherwise, a text file will be created with the violations.
     */
    @Parameter
    private File useFile;

    /**
     * Specifies the names filter of the source files to be excluded for
     * Checkstyle.
     */
    @Parameter(property = "checkstyle.excludes")
    private String excludes;

    /**
     * Specifies the names filter of the source files to be used for Checkstyle.
     */
    @Parameter(property = "checkstyle.includes", defaultValue = JAVA_FILES, required = true)
    private String includes;

    /**
     * Specifies the names filter of the files to be excluded for
     * Checkstyle when checking resources.
     * @since 2.11
     */
    @Parameter(property = "checkstyle.resourceExcludes")
    private String resourceExcludes;

    /**
     * Specifies the names filter of the files to be used for Checkstyle when checking resources.
     * @since 2.11
     */
    @Parameter(property = "checkstyle.resourceIncludes", defaultValue = "**/*.properties", required = true)
    private String resourceIncludes;

    /**
     * If this is true, and Checkstyle reported any violations or errors,
     * the build fails immediately after running Checkstyle, before checking the log
     * for {@link #logViolationsToConsole}. If you want to use {@link #logViolationsToConsole},
     * use {@link #failOnViolation} instead of this.
     */
    @Parameter(defaultValue = "false")
    private boolean failsOnError;

    /**
     * Specifies the location of the test source directory to be used for Checkstyle.
     *
     * @since 2.2
     * @deprecated instead use {@link #testSourceDirectories}. For version 3.0.0, this parameter is only defined to
     *             break the build if you use it!
     */
    @Deprecated
    @Parameter
    private File testSourceDirectory;

    /**
     * Specifies the location of the test source directories to be used for Checkstyle.
     * Default value is <code>${project.testCompileSourceRoots}</code>.
     * @since 2.13
     */
    // Compatibility with all Maven 3: default of 'project.testCompileSourceRoots' is done manually because of MNG-5440
    @Parameter
    private List<String> testSourceDirectories;

    /**
     * Include or not the test source directory to be used for Checkstyle.
     *
     * @since 2.2
     */
    @Parameter(defaultValue = "false")
    private boolean includeTestSourceDirectory;

    /**
     * Specifies the location of the source directory to be used for Checkstyle.
     *
     * @deprecated instead use {@link #sourceDirectories}. For version 3.0.0, this parameter is only defined to break
     *             the build if you use it!
     */
    @Deprecated
    @Parameter
    private File sourceDirectory;

    /**
     * Specifies the location of the source directories to be used for Checkstyle.
     * Default value is <code>${project.compileSourceRoots}</code>.
     * @since 2.13
     */
    // Compatibility with all Maven 3: default of 'project.compileSourceRoots' is done manually because of MNG-5440
    @Parameter
    private List<String> sourceDirectories;

    /**
     * Whether to apply Checkstyle to resource directories.
     * @since 2.11
     */
    @Parameter(property = "checkstyle.includeResources", defaultValue = "true", required = true)
    private boolean includeResources = true;

    /**
     * Whether to apply Checkstyle to test resource directories.
     * @since 2.11
     */
    @Parameter(property = "checkstyle.includeTestResources", defaultValue = "true", required = true)
    private boolean includeTestResources = true;

    /**
     * By using this property, you can specify the whole Checkstyle rules
     * inline directly inside this pom.
     *
     * <pre>
     * &lt;plugin&gt;
     *   ...
     *   &lt;configuration&gt;
     *     &lt;checkstyleRules&gt;
     *       &lt;module name="Checker"&gt;
     *         &lt;module name="FileTabCharacter"&gt;
     *           &lt;property name="eachLine" value="true" /&gt;
     *         &lt;/module&gt;
     *         &lt;module name="TreeWalker"&gt;
     *           &lt;module name="EmptyBlock"/&gt;
     *         &lt;/module&gt;
     *       &lt;/module&gt;
     *     &lt;/checkstyleRules&gt;
     *   &lt;/configuration&gt;
     *   ...
     * </pre>
     *
     * @since 2.12
     */
    @Parameter
    private PlexusConfiguration checkstyleRules;

    /**
     * Dump file for inlined Checkstyle rules.
     */
    @Parameter(
            property = "checkstyle.output.rules.file",
            defaultValue = "${project.build.directory}/checkstyle-rules.xml")
    private File rulesFiles;

    /**
     * The header to use for the inline configuration.
     * Only used when you specify {@code checkstyleRules}.
     */
    @Parameter(
            defaultValue = "<?xml version=\"1.0\"?>\n"
                    + "<!DOCTYPE module PUBLIC \"-//Checkstyle//DTD Checkstyle Configuration 1.3//EN\"\n"
                    + "        \"https://checkstyle.org/dtds/configuration_1_3.dtd\">\n")
    private String checkstyleRulesHeader;

    /**
     * Specifies whether modules with a configured severity of <code>ignore</code> should be omitted during Checkstyle
     * invocation.
     *
     * @since 3.0.0
     */
    @Parameter(defaultValue = "false")
    private boolean omitIgnoredModules;

    /**
     * Specifies whether generated source files should be excluded from Checkstyle.
     *
     * @since 3.3.1
     */
    @Parameter(property = "checkstyle.excludeGeneratedSources", defaultValue = "false")
    private boolean excludeGeneratedSources;

    // The XML results file that is parsed to count and print violations. Usually equal to
    // outputFile, but points at a temp file when outputFileFormat is 'plain' or 'sarif'.
    private File outputXmlFile;

    /**
     * @since 2.5
     */
    protected final CheckstyleExecutor checkstyleExecutor;

    @Inject
    public CheckstyleViolationCheckMojo(final @Named("default") CheckstyleExecutor checkstyleExecutor) {
        this.checkstyleExecutor = checkstyleExecutor;
    }

    /** {@inheritDoc} */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        checkDeprecatedParameterUsage(sourceDirectory, "sourceDirectory", "sourceDirectories");
        checkDeprecatedParameterUsage(testSourceDirectory, "testSourceDirectory", "testSourceDirectories");
        if (skip) {
            return;
        }

        outputXmlFile = outputFile;

        if (!skipExec) {
            String effectiveConfigLocation = configLocation;
            if (checkstyleRules != null) {
                // Inline rules are exclusive with an external configLocation.
                if (!DEFAULT_CONFIG_LOCATION.equals(configLocation)) {
                    throw new MojoExecutionException(
                            "If you use inline configuration for rules, don't specify " + "a configLocation");
                }
                if (checkstyleRules.getChildCount() > 1) {
                    throw new MojoExecutionException("Currently only one root module is supported");
                }

                PlexusConfiguration checkerModule = checkstyleRules.getChild(0);

                try {
                    FileUtils.forceMkdir(rulesFiles.getParentFile());
                    FileUtils.fileWrite(rulesFiles, checkstyleRulesHeader + checkerModule.toString());
                } catch (final IOException e) {
                    throw new MojoExecutionException(e.getMessage(), e);
                }
                effectiveConfigLocation = rulesFiles.getAbsolutePath();
            }

            ClassLoader currentClassLoader = Thread.currentThread().getContextClassLoader();

            try {
                CheckstyleExecutorRequest request = new CheckstyleExecutorRequest();
                request.setConsoleListener(getConsoleListener())
                        .setConsoleOutput(consoleOutput)
                        .setExcludes(excludes)
                        .setFailsOnError(failsOnError)
                        .setIncludes(includes)
                        .setResourceIncludes(resourceIncludes)
                        .setResourceExcludes(resourceExcludes)
                        .setIncludeResources(includeResources)
                        .setIncludeTestResources(includeTestResources)
                        .setIncludeTestSourceDirectory(includeTestSourceDirectory)
                        .setListener(getListener())
                        .setProject(project)
                        .setSourceDirectories(getSourceDirectories())
                        .setResources(resources)
                        .setTestResources(testResources)
                        .setSuppressionsLocation(suppressionsLocation)
                        .setTestSourceDirectories(getTestSourceDirectories())
                        .setConfigLocation(effectiveConfigLocation)
                        .setConfigurationArtifacts(collectArtifacts("config"))
                        .setPropertyExpansion(propertyExpansion)
                        .setHeaderLocation(headerLocation)
                        .setLicenseArtifacts(collectArtifacts("license"))
                        .setCacheFile(cacheFile)
                        .setSuppressionsFileExpression(suppressionsFileExpression)
                        .setEncoding(inputEncoding)
                        .setPropertiesLocation(propertiesLocation)
                        .setOmitIgnoredModules(omitIgnoredModules);
                checkstyleExecutor.executeCheckstyle(request);
            } catch (CheckstyleException e) {
                throw new MojoExecutionException("Failed during checkstyle configuration", e);
            } catch (CheckstyleExecutorException e) {
                throw new MojoExecutionException("Failed during checkstyle execution", e);
            } finally {
                // be sure to restore original context classloader
                Thread.currentThread().setContextClassLoader(currentClassLoader);
            }
        }

        // With skipExec, an earlier analysis is re-used: only the XML format can be parsed back.
        if (!"xml".equals(outputFileFormat) && skipExec) {
            throw new MojoExecutionException("Output format is '" + outputFileFormat
                    + "', checkstyle:check requires format to be 'xml' when using skipExec.");
        }

        if (!outputXmlFile.exists()) {
            getLog().info("Unable to perform checkstyle:check, unable to find checkstyle:checkstyle outputFile.");
            return;
        }

        try (Reader reader = new BufferedReader(ReaderFactory.newXmlReader(outputXmlFile))) {
            XmlPullParser xpp = new MXParser();
            xpp.setInput(reader);

            final List<Violation> violationsList = getViolations(xpp);
            long violationCount = countViolations(violationsList);
            printViolations(violationsList);

            String msg = "You have " + violationCount + " Checkstyle violation"
                    + ((violationCount > 1 || violationCount == 0) ? "s" : "") + ".";

            if (violationCount > maxAllowedViolations) {
                if (failOnViolation) {
                    if (maxAllowedViolations > 0) {
                        msg += " The maximum number of allowed violations is " + maxAllowedViolations + ".";
                    }
                    throw new MojoFailureException(msg);
                }

                getLog().warn("checkstyle:check violations detected but failOnViolation set to false");
            }
            if (logViolationCountToConsole) {
                if (maxAllowedViolations > 0) {
                    msg += " The maximum number of allowed violations is " + maxAllowedViolations + ".";
                }
                getLog().info(msg);
            }
        } catch (IOException | XmlPullParserException e) {
            throw new MojoExecutionException(
                    "Unable to read Checkstyle results xml: " + outputXmlFile.getAbsolutePath(), e);
        }
    }

    /**
     * Fails the build if a removed (deprecated) parameter is still configured.
     */
    private void checkDeprecatedParameterUsage(Object parameter, String name, String replacement)
            throws MojoFailureException {
        if (parameter != null) {
            throw new MojoFailureException("You are using '" + name + "' which has been removed"
                    + " from the maven-checkstyle-plugin. " + "Please use '" + replacement
                    + "' and refer to the >>Major Version Upgrade to version 3.0.0<< " + "on the plugin site.");
        }
    }

    /**
     * Parses the Checkstyle XML report into {@link Violation} objects.
     * File paths are made relative to the project basedir.
     */
    private List<Violation> getViolations(XmlPullParser xpp) throws XmlPullParserException, IOException {
        List<Violation> violations = new ArrayList<>();

        String basedir = project.getBasedir().getAbsolutePath();
        String file = "";

        for (int eventType = xpp.getEventType(); eventType != XmlPullParser.END_DOCUMENT; eventType = xpp.next()) {
            if (eventType != XmlPullParser.START_TAG) {
                continue;
            } else if ("file".equals(xpp.getName())) {
                // Remember the current file; subsequent <error> tags belong to it.
                file = PathTool.getRelativeFilePath(basedir, xpp.getAttributeValue("", "name"));
                continue;
            } else if (!"error".equals(xpp.getName())) {
                continue;
            }

            String severity = xpp.getAttributeValue("", "severity");
            String source = xpp.getAttributeValue("", "source");
            String line = xpp.getAttributeValue("", "line");
            /* Nullable */
            String column = xpp.getAttributeValue("", "column");
            String message = xpp.getAttributeValue("", "message");
            String rule = RuleUtil.getName(source);
            String category = RuleUtil.getCategory(source);

            Violation violation = new Violation(source, file, line, severity, message, rule, category);
            if (column != null) {
                violation.setColumn(column);
            }

            violations.add(violation);
        }

        return violations;
    }

    /**
     * Counts the violations at or above {@link #violationSeverity}, excluding
     * those matched by {@link #violationIgnore}.
     */
    private int countViolations(List<Violation> violations) {
        List<RuleUtil.Matcher> ignores = violationIgnore == null
                ? Collections.<RuleUtil.Matcher>emptyList()
                : RuleUtil.parseMatchers(violationIgnore.split(","));

        int ignored = 0;
        int countedViolations = 0;

        for (Violation violation : violations) {
            if (!isViolation(violation.getSeverity())) {
                continue;
            }

            if (ignore(ignores, violation.getSource())) {
                ignored++;
                continue;
            }

            countedViolations++;
        }

        if (ignored > 0) {
            getLog().info("Ignored " + ignored + " error" + ((ignored > 1L) ? "s" : "") + ", " + countedViolations
                    + " violation" + ((countedViolations > 1) ? "s" : "") + " remaining.");
        }

        return countedViolations;
    }

    /**
     * Logs each counted violation to the console, when {@link #logViolationsToConsole} is enabled.
     */
    private void printViolations(List<Violation> violations) {
        if (!logViolationsToConsole) {
            return;
        }

        List<RuleUtil.Matcher> ignores = violationIgnore == null
                ? Collections.<RuleUtil.Matcher>emptyList()
                : RuleUtil.parseMatchers(violationIgnore.split(","));

        violations.stream()
                .filter(violation -> isViolation(violation.getSeverity()))
                .filter(violation -> !ignore(ignores, violation.getSource()))
                .forEach(violation -> {
                    final String message = String.format(
                            "%s:[%s%s] (%s) %s: %s",
                            violation.getFile(),
                            violation.getLine(),
                            (Violation.NO_COLUMN.equals(violation.getColumn())) ? "" : (',' + violation.getColumn()),
                            violation.getCategory(),
                            violation.getRuleName(),
                            violation.getMessage());
                    log(violation.getSeverity(), message);
                });
    }

    /** Routes a message to the log level matching the Checkstyle severity. */
    private void log(String severity, String message) {
        if ("info".equals(severity)) {
            getLog().info(message);
        } else if ("warning".equals(severity)) {
            getLog().warn(message);
        } else {
            getLog().error(message);
        }
    }

    /**
     * Checks if the given severity is considered a violation.
     *
     * @param severity The severity to check
     * @return <code>true</code> if the given severity is a violation, otherwise <code>false</code>
     */
    private boolean isViolation(String severity) {
        if ("error".equals(severity)) {
            return "error".equals(violationSeverity)
                    || "warning".equals(violationSeverity)
                    || "info".equals(violationSeverity);
        } else if ("warning".equals(severity)) {
            return "warning".equals(violationSeverity) || "info".equals(violationSeverity);
        } else if ("info".equals(severity)) {
            return "info".equals(violationSeverity);
        } else {
            return false;
        }
    }

    /** Returns whether the given rule source matches any of the configured ignore matchers. */
    private boolean ignore(List<RuleUtil.Matcher> ignores, String source) {
        for (RuleUtil.Matcher ignore : ignores) {
            if (ignore.match(source)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Returns the console listener: stdout by default, or a plain-text file
     * when {@link #useFile} is configured.
     */
    private AuditListener getConsoleListener() throws MojoExecutionException {
        AuditListener consoleListener;

        if (useFile == null) {
            consoleListener = new MavenConsoleLogger(getLog());
        } else {
            OutputStream out = getOutputStream(useFile);
            consoleListener = new DefaultLogger(out, OutputStreamOptions.CLOSE);
        }

        return consoleListener;
    }

    /**
     * Opens an output stream to the given file, creating parent directories as needed.
     */
    private OutputStream getOutputStream(File file) throws MojoExecutionException {
        File parentFile = file.getAbsoluteFile().getParentFile();

        if (!parentFile.exists()) {
            parentFile.mkdirs();
        }

        FileOutputStream fileOutputStream;
        try {
            fileOutputStream = new FileOutputStream(file);
        } catch (FileNotFoundException e) {
            throw new MojoExecutionException("Unable to create output stream: " + file, e);
        }
        return fileOutputStream;
    }

    /**
     * Builds the report listener for {@link #outputFileFormat}. For 'plain' and
     * 'sarif', an additional XML report is written to a temp file so violations
     * can still be counted.
     */
    private AuditListener getListener() throws MojoFailureException, MojoExecutionException {
        AuditListener listener = null;

        if (outputFileFormat != null && !outputFileFormat.isEmpty()) {
            File resultFile = outputFile;

            OutputStream out = getOutputStream(resultFile);

            if ("xml".equals(outputFileFormat)) {
                listener = new XMLLogger(out, OutputStreamOptions.CLOSE);
            } else if ("plain".equals(outputFileFormat)) {
                try {
                    // Write a plain output file to the standard output file,
                    // and write an XML output file to the temp directory that can be used to count violations
                    outputXmlFile =
                            Files.createTempFile("checkstyle-result", ".xml").toFile();
                    outputXmlFile.deleteOnExit();
                    OutputStream xmlOut = getOutputStream(outputXmlFile);
                    CompositeAuditListener compoundListener = new CompositeAuditListener();
                    compoundListener.addListener(new XMLLogger(xmlOut, OutputStreamOptions.CLOSE));
                    compoundListener.addListener(new DefaultLogger(out, OutputStreamOptions.CLOSE));
                    listener = compoundListener;
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable to create temporary file", e);
                }
            } else if ("sarif".equals(outputFileFormat)) {
                try {
                    // Write a sarif output file to the standard output file,
                    // and write an XML output file to the temp directory that can be used to count violations
                    outputXmlFile =
                            Files.createTempFile("checkstyle-result", ".xml").toFile();
                    outputXmlFile.deleteOnExit();
                    OutputStream xmlOut = getOutputStream(outputXmlFile);
                    CompositeAuditListener compoundListener = new CompositeAuditListener();
                    compoundListener.addListener(new XMLLogger(xmlOut, OutputStreamOptions.CLOSE));
                    compoundListener.addListener(new SarifLogger(out, OutputStreamOptions.CLOSE));
                    listener = compoundListener;
                } catch (IOException e) {
                    throw new MojoExecutionException("Unable to create temporary file", e);
                }
            } else {
                // Fixed: the message previously omitted 'sarif' even though it is accepted above.
                throw new MojoFailureException(
                        "Invalid output file format: (" + outputFileFormat + "). Must be 'plain', 'sarif' or 'xml'.");
            }
        }

        return listener;
    }

    /**
     * Collects the dependencies of this plugin (from both pluginManagement and
     * plugins) as resolved artifacts, e.g. for custom configs or license headers.
     */
    private List<Artifact> collectArtifacts(String hint) {
        List<Artifact> artifacts = new ArrayList<>();

        PluginManagement pluginManagement = project.getBuild().getPluginManagement();
        if (pluginManagement != null) {
            artifacts.addAll(getCheckstylePluginDependenciesAsArtifacts(pluginManagement.getPluginsAsMap(), hint));
        }

        artifacts.addAll(
                getCheckstylePluginDependenciesAsArtifacts(project.getBuild().getPluginsAsMap(), hint));

        return artifacts;
    }

    private List<Artifact> getCheckstylePluginDependenciesAsArtifacts(Map<String, Plugin> plugins, String hint) {
        List<Artifact> artifacts = new ArrayList<>();

        Plugin checkstylePlugin = plugins.get(plugin.getGroupId() + ":" + plugin.getArtifactId());
        if (checkstylePlugin != null) {
            for (Dependency dep : checkstylePlugin.getDependencies()) {
                // @todo if we can filter on hints, it should be done here...
                String depKey = dep.getGroupId() + ":" + dep.getArtifactId();
                artifacts.add(plugin.getArtifactMap().get(depKey));
            }
        }
        return artifacts;
    }

    private List<File> getSourceDirectories() {
        if (sourceDirectories == null) {
            sourceDirectories = filterBuildTarget(project.getCompileSourceRoots());
        }
        List<File> sourceDirs = new ArrayList<>(sourceDirectories.size());
        for (String sourceDir : sourceDirectories) {
            sourceDirs.add(FileUtils.resolveFile(project.getBasedir(), sourceDir));
        }
        return sourceDirs;
    }

    private List<File> getTestSourceDirectories() {
        if (testSourceDirectories == null) {
            testSourceDirectories = filterBuildTarget(project.getTestCompileSourceRoots());
        }
        List<File> testSourceDirs = new ArrayList<>(testSourceDirectories.size());
        for (String testSourceDir : testSourceDirectories) {
            testSourceDirs.add(FileUtils.resolveFile(project.getBasedir(), testSourceDir));
        }
        return testSourceDirs;
    }

    /**
     * Drops source roots located under the build target directory when
     * {@link #excludeGeneratedSources} is enabled.
     */
    private List<String> filterBuildTarget(List<String> sourceDirectories) {
        if (!excludeGeneratedSources) {
            return sourceDirectories;
        }

        List<String> filtered = new ArrayList<>(sourceDirectories.size());
        Path buildTarget = FileUtils.resolveFile(
                        project.getBasedir(), project.getBuild().getDirectory())
                .toPath();

        for (String sourceDir : sourceDirectories) {
            Path src = FileUtils.resolveFile(project.getBasedir(), sourceDir).toPath();
            if (!src.startsWith(buildTarget)) {
                filtered.add(sourceDir);
            }
        }

        return filtered;
    }
}
googleapis/google-cloud-java
35,244
java-cloudbuild/proto-google-cloud-build-v1/src/main/java/com/google/cloudbuild/v1/CreateBuildTriggerRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/cloudbuild/v1/cloudbuild.proto // Protobuf Java Version: 3.25.8 package com.google.cloudbuild.v1; /** * * * <pre> * Request to create a new `BuildTrigger`. * </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.CreateBuildTriggerRequest} */ public final class CreateBuildTriggerRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.devtools.cloudbuild.v1.CreateBuildTriggerRequest) CreateBuildTriggerRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateBuildTriggerRequest.newBuilder() to construct. 
private CreateBuildTriggerRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateBuildTriggerRequest() { parent_ = ""; projectId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateBuildTriggerRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_CreateBuildTriggerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_CreateBuildTriggerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.CreateBuildTriggerRequest.class, com.google.cloudbuild.v1.CreateBuildTriggerRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PROJECT_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object projectId_ = ""; /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The projectId. */ @java.lang.Override public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for projectId. */ @java.lang.Override public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TRIGGER_FIELD_NUMBER = 2; private com.google.cloudbuild.v1.BuildTrigger trigger_; /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the trigger field is set. 
*/ @java.lang.Override public boolean hasTrigger() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The trigger. */ @java.lang.Override public com.google.cloudbuild.v1.BuildTrigger getTrigger() { return trigger_ == null ? com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance() : trigger_; } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloudbuild.v1.BuildTriggerOrBuilder getTriggerOrBuilder() { return trigger_ == null ? com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance() : trigger_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getTrigger()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, parent_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTrigger()); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, parent_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloudbuild.v1.CreateBuildTriggerRequest)) { return super.equals(obj); } com.google.cloudbuild.v1.CreateBuildTriggerRequest other = (com.google.cloudbuild.v1.CreateBuildTriggerRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getProjectId().equals(other.getProjectId())) return false; if (hasTrigger() != other.hasTrigger()) return false; if (hasTrigger()) { if (!getTrigger().equals(other.getTrigger())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); if (hasTrigger()) { hash = (37 * hash) + TRIGGER_FIELD_NUMBER; hash = (53 * hash) + getTrigger().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); 
} public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, 
extensionRegistry); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloudbuild.v1.CreateBuildTriggerRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to create a new `BuildTrigger`. 
* </pre> * * Protobuf type {@code google.devtools.cloudbuild.v1.CreateBuildTriggerRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.devtools.cloudbuild.v1.CreateBuildTriggerRequest) com.google.cloudbuild.v1.CreateBuildTriggerRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_CreateBuildTriggerRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_CreateBuildTriggerRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloudbuild.v1.CreateBuildTriggerRequest.class, com.google.cloudbuild.v1.CreateBuildTriggerRequest.Builder.class); } // Construct using com.google.cloudbuild.v1.CreateBuildTriggerRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTriggerFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; projectId_ = ""; trigger_ = null; if (triggerBuilder_ != null) { triggerBuilder_.dispose(); triggerBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloudbuild.v1.Cloudbuild .internal_static_google_devtools_cloudbuild_v1_CreateBuildTriggerRequest_descriptor; } @java.lang.Override public com.google.cloudbuild.v1.CreateBuildTriggerRequest getDefaultInstanceForType() { return 
com.google.cloudbuild.v1.CreateBuildTriggerRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloudbuild.v1.CreateBuildTriggerRequest build() { com.google.cloudbuild.v1.CreateBuildTriggerRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloudbuild.v1.CreateBuildTriggerRequest buildPartial() { com.google.cloudbuild.v1.CreateBuildTriggerRequest result = new com.google.cloudbuild.v1.CreateBuildTriggerRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloudbuild.v1.CreateBuildTriggerRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.projectId_ = projectId_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.trigger_ = triggerBuilder_ == null ? 
trigger_ : triggerBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloudbuild.v1.CreateBuildTriggerRequest) { return mergeFrom((com.google.cloudbuild.v1.CreateBuildTriggerRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloudbuild.v1.CreateBuildTriggerRequest other) { if (other == com.google.cloudbuild.v1.CreateBuildTriggerRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasTrigger()) { mergeTrigger(other.getTrigger()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { projectId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 10 case 18: { input.readMessage(getTriggerFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 18 case 26: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The parent resource where this trigger will be created. * Format: `projects/{project}/locations/{location}` * </pre> * * <code>string parent = 3 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object projectId_ = ""; /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The projectId. 
*/ public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for projectId. */ public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The projectId to set. * @return This builder for chaining. */ public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. ID of the project for which to configure automatic builds. * </pre> * * <code>string project_id = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for projectId to set. * @return This builder for chaining. 
*/ public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloudbuild.v1.BuildTrigger trigger_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.BuildTrigger, com.google.cloudbuild.v1.BuildTrigger.Builder, com.google.cloudbuild.v1.BuildTriggerOrBuilder> triggerBuilder_; /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the trigger field is set. */ public boolean hasTrigger() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The trigger. */ public com.google.cloudbuild.v1.BuildTrigger getTrigger() { if (triggerBuilder_ == null) { return trigger_ == null ? com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance() : trigger_; } else { return triggerBuilder_.getMessage(); } } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTrigger(com.google.cloudbuild.v1.BuildTrigger value) { if (triggerBuilder_ == null) { if (value == null) { throw new NullPointerException(); } trigger_ = value; } else { triggerBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. `BuildTrigger` to create. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTrigger(com.google.cloudbuild.v1.BuildTrigger.Builder builderForValue) { if (triggerBuilder_ == null) { trigger_ = builderForValue.build(); } else { triggerBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeTrigger(com.google.cloudbuild.v1.BuildTrigger value) { if (triggerBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && trigger_ != null && trigger_ != com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance()) { getTriggerBuilder().mergeFrom(value); } else { trigger_ = value; } } else { triggerBuilder_.mergeFrom(value); } if (trigger_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearTrigger() { bitField0_ = (bitField0_ & ~0x00000004); trigger_ = null; if (triggerBuilder_ != null) { triggerBuilder_.dispose(); triggerBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v1.BuildTrigger.Builder getTriggerBuilder() { bitField0_ |= 0x00000004; onChanged(); return getTriggerFieldBuilder().getBuilder(); } /** * * * <pre> * Required. `BuildTrigger` to create. 
* </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloudbuild.v1.BuildTriggerOrBuilder getTriggerOrBuilder() { if (triggerBuilder_ != null) { return triggerBuilder_.getMessageOrBuilder(); } else { return trigger_ == null ? com.google.cloudbuild.v1.BuildTrigger.getDefaultInstance() : trigger_; } } /** * * * <pre> * Required. `BuildTrigger` to create. * </pre> * * <code> * .google.devtools.cloudbuild.v1.BuildTrigger trigger = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.BuildTrigger, com.google.cloudbuild.v1.BuildTrigger.Builder, com.google.cloudbuild.v1.BuildTriggerOrBuilder> getTriggerFieldBuilder() { if (triggerBuilder_ == null) { triggerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloudbuild.v1.BuildTrigger, com.google.cloudbuild.v1.BuildTrigger.Builder, com.google.cloudbuild.v1.BuildTriggerOrBuilder>( getTrigger(), getParentForChildren(), isClean()); trigger_ = null; } return triggerBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.devtools.cloudbuild.v1.CreateBuildTriggerRequest) } // @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.CreateBuildTriggerRequest) private static final com.google.cloudbuild.v1.CreateBuildTriggerRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloudbuild.v1.CreateBuildTriggerRequest(); } public static com.google.cloudbuild.v1.CreateBuildTriggerRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<CreateBuildTriggerRequest> PARSER = new com.google.protobuf.AbstractParser<CreateBuildTriggerRequest>() { @java.lang.Override public CreateBuildTriggerRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateBuildTriggerRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateBuildTriggerRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloudbuild.v1.CreateBuildTriggerRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,542
java-dialogflow/google-cloud-dialogflow/src/main/java/com/google/cloud/dialogflow/v2beta1/stub/IntentsStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.v2beta1.stub; import static com.google.cloud.dialogflow.v2beta1.IntentsClient.ListIntentsPagedResponse; import static com.google.cloud.dialogflow.v2beta1.IntentsClient.ListLocationsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import 
com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.v2beta1.BatchDeleteIntentsRequest; import com.google.cloud.dialogflow.v2beta1.BatchUpdateIntentsRequest; import com.google.cloud.dialogflow.v2beta1.BatchUpdateIntentsResponse; import com.google.cloud.dialogflow.v2beta1.CreateIntentRequest; import com.google.cloud.dialogflow.v2beta1.DeleteIntentRequest; import com.google.cloud.dialogflow.v2beta1.GetIntentRequest; import com.google.cloud.dialogflow.v2beta1.Intent; import com.google.cloud.dialogflow.v2beta1.ListIntentsRequest; import com.google.cloud.dialogflow.v2beta1.ListIntentsResponse; import com.google.cloud.dialogflow.v2beta1.UpdateIntentRequest; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import com.google.protobuf.Struct; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link IntentsStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dialogflow.googleapis.com) and default port (443) are used. 
* <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getIntent: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * intentsSettingsBuilder * .getIntentSettings() * .setRetrySettings( * intentsSettingsBuilder * .getIntentSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * IntentsStubSettings intentsSettings = intentsSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. 
For example, to * configure the RetrySettings for batchUpdateIntents: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * IntentsStubSettings.Builder intentsSettingsBuilder = IntentsStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * intentsSettingsBuilder * .batchUpdateIntentsOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @BetaApi @Generated("by gapic-generator-java") public class IntentsStubSettings extends StubSettings<IntentsStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/cloud-platform") .add("https://www.googleapis.com/auth/dialogflow") .build(); private final PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings; private final UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings; private final UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings; private final UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings; private final UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings; private final UnaryCallSettings<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings; private final OperationCallSettings<BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings; private final UnaryCallSettings<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings; private final OperationCallSettings<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings; private final PagedCallSettings< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings; private static final PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent> LIST_INTENTS_PAGE_STR_DESC = new PagedListDescriptor<ListIntentsRequest, ListIntentsResponse, Intent>() { @Override public String emptyToken() { return ""; } @Override public ListIntentsRequest injectToken(ListIntentsRequest payload, String token) { return ListIntentsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListIntentsRequest injectPageSize(ListIntentsRequest payload, int pageSize) { return ListIntentsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListIntentsRequest payload) { return 
payload.getPageSize(); } @Override public String extractNextToken(ListIntentsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Intent> extractResources(ListIntentsResponse payload) { return payload.getIntentsList(); } }; private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location> LIST_LOCATIONS_PAGE_STR_DESC = new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() { @Override public String emptyToken() { return ""; } @Override public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) { return ListLocationsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) { return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListLocationsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListLocationsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Location> extractResources(ListLocationsResponse payload) { return payload.getLocationsList(); } }; private static final PagedListResponseFactory< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> LIST_INTENTS_PAGE_STR_FACT = new PagedListResponseFactory< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse>() { @Override public ApiFuture<ListIntentsPagedResponse> getFuturePagedResponse( UnaryCallable<ListIntentsRequest, ListIntentsResponse> callable, ListIntentsRequest request, ApiCallContext context, ApiFuture<ListIntentsResponse> futureResponse) { PageContext<ListIntentsRequest, ListIntentsResponse, Intent> pageContext = PageContext.create(callable, LIST_INTENTS_PAGE_STR_DESC, request, context); return ListIntentsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListLocationsRequest, 
ListLocationsResponse, ListLocationsPagedResponse> LIST_LOCATIONS_PAGE_STR_FACT = new PagedListResponseFactory< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() { @Override public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse( UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable, ListLocationsRequest request, ApiCallContext context, ApiFuture<ListLocationsResponse> futureResponse) { PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext = PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context); return ListLocationsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to listIntents. */ public PagedCallSettings<ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings() { return listIntentsSettings; } /** Returns the object with the settings used for calls to getIntent. */ public UnaryCallSettings<GetIntentRequest, Intent> getIntentSettings() { return getIntentSettings; } /** Returns the object with the settings used for calls to createIntent. */ public UnaryCallSettings<CreateIntentRequest, Intent> createIntentSettings() { return createIntentSettings; } /** Returns the object with the settings used for calls to updateIntent. */ public UnaryCallSettings<UpdateIntentRequest, Intent> updateIntentSettings() { return updateIntentSettings; } /** Returns the object with the settings used for calls to deleteIntent. */ public UnaryCallSettings<DeleteIntentRequest, Empty> deleteIntentSettings() { return deleteIntentSettings; } /** Returns the object with the settings used for calls to batchUpdateIntents. */ public UnaryCallSettings<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings() { return batchUpdateIntentsSettings; } /** Returns the object with the settings used for calls to batchUpdateIntents. 
*/ public OperationCallSettings<BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings() { return batchUpdateIntentsOperationSettings; } /** Returns the object with the settings used for calls to batchDeleteIntents. */ public UnaryCallSettings<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings() { return batchDeleteIntentsSettings; } /** Returns the object with the settings used for calls to batchDeleteIntents. */ public OperationCallSettings<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings() { return batchDeleteIntentsOperationSettings; } /** Returns the object with the settings used for calls to listLocations. */ public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the object with the settings used for calls to getLocation. */ public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } public IntentsStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcIntentsStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonIntentsStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "dialogflow"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. 
*/ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "dialogflow.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "dialogflow.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. 
*/ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(IntentsStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return IntentsStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected IntentsStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); listIntentsSettings = settingsBuilder.listIntentsSettings().build(); getIntentSettings = settingsBuilder.getIntentSettings().build(); createIntentSettings = settingsBuilder.createIntentSettings().build(); updateIntentSettings = settingsBuilder.updateIntentSettings().build(); deleteIntentSettings = settingsBuilder.deleteIntentSettings().build(); batchUpdateIntentsSettings = settingsBuilder.batchUpdateIntentsSettings().build(); batchUpdateIntentsOperationSettings = settingsBuilder.batchUpdateIntentsOperationSettings().build(); batchDeleteIntentsSettings = settingsBuilder.batchDeleteIntentsSettings().build(); batchDeleteIntentsOperationSettings = settingsBuilder.batchDeleteIntentsOperationSettings().build(); listLocationsSettings = settingsBuilder.listLocationsSettings().build(); getLocationSettings = settingsBuilder.getLocationSettings().build(); } /** Builder for IntentsStubSettings. 
*/ public static class Builder extends StubSettings.Builder<IntentsStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings; private final UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings; private final UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings; private final UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings; private final UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings; private final UnaryCallSettings.Builder<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings; private final OperationCallSettings.Builder< BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings; private final UnaryCallSettings.Builder<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings; private final OperationCallSettings.Builder<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings; private final PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings; private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = 
RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(60000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L)) .setTotalTimeoutDuration(Duration.ofMillis(60000L)) .build(); definitions.put("retry_policy_0_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); listIntentsSettings = PagedCallSettings.newBuilder(LIST_INTENTS_PAGE_STR_FACT); getIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteIntentSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchUpdateIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchUpdateIntentsOperationSettings = OperationCallSettings.newBuilder(); batchDeleteIntentsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); batchDeleteIntentsOperationSettings = OperationCallSettings.newBuilder(); listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT); getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listIntentsSettings, getIntentSettings, createIntentSettings, updateIntentSettings, deleteIntentSettings, batchUpdateIntentsSettings, batchDeleteIntentsSettings, listLocationsSettings, getLocationSettings); initDefaults(this); } protected Builder(IntentsStubSettings settings) { super(settings); listIntentsSettings = settings.listIntentsSettings.toBuilder(); getIntentSettings = settings.getIntentSettings.toBuilder(); createIntentSettings = settings.createIntentSettings.toBuilder(); 
updateIntentSettings = settings.updateIntentSettings.toBuilder(); deleteIntentSettings = settings.deleteIntentSettings.toBuilder(); batchUpdateIntentsSettings = settings.batchUpdateIntentsSettings.toBuilder(); batchUpdateIntentsOperationSettings = settings.batchUpdateIntentsOperationSettings.toBuilder(); batchDeleteIntentsSettings = settings.batchDeleteIntentsSettings.toBuilder(); batchDeleteIntentsOperationSettings = settings.batchDeleteIntentsOperationSettings.toBuilder(); listLocationsSettings = settings.listLocationsSettings.toBuilder(); getLocationSettings = settings.getLocationSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listIntentsSettings, getIntentSettings, createIntentSettings, updateIntentSettings, deleteIntentSettings, batchUpdateIntentsSettings, batchDeleteIntentsSettings, listLocationsSettings, getLocationSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder createHttpJsonDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .listIntentsSettings() 
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .createIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .updateIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .deleteIntentSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchUpdateIntentsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchDeleteIntentsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .listLocationsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .getLocationSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")); builder .batchUpdateIntentsOperationSettings() .setInitialCallSettings( UnaryCallSettings .<BatchUpdateIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")) .build()) .setResponseTransformer( 
ProtoOperationTransformers.ResponseTransformer.create( BatchUpdateIntentsResponse.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Struct.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); builder .batchDeleteIntentsOperationSettings() .setInitialCallSettings( UnaryCallSettings .<BatchDeleteIntentsRequest, OperationSnapshot>newUnaryCallSettingsBuilder() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params")) .build()) .setResponseTransformer( ProtoOperationTransformers.ResponseTransformer.create(Empty.class)) .setMetadataTransformer( ProtoOperationTransformers.MetadataTransformer.create(Struct.class)) .setPollingAlgorithm( OperationTimedPollAlgorithm.create( RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(5000L)) .setRetryDelayMultiplier(1.5) .setMaxRetryDelayDuration(Duration.ofMillis(45000L)) .setInitialRpcTimeoutDuration(Duration.ZERO) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ZERO) .setTotalTimeoutDuration(Duration.ofMillis(300000L)) .build())); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. 
*/ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to listIntents. */ public PagedCallSettings.Builder< ListIntentsRequest, ListIntentsResponse, ListIntentsPagedResponse> listIntentsSettings() { return listIntentsSettings; } /** Returns the builder for the settings used for calls to getIntent. */ public UnaryCallSettings.Builder<GetIntentRequest, Intent> getIntentSettings() { return getIntentSettings; } /** Returns the builder for the settings used for calls to createIntent. */ public UnaryCallSettings.Builder<CreateIntentRequest, Intent> createIntentSettings() { return createIntentSettings; } /** Returns the builder for the settings used for calls to updateIntent. */ public UnaryCallSettings.Builder<UpdateIntentRequest, Intent> updateIntentSettings() { return updateIntentSettings; } /** Returns the builder for the settings used for calls to deleteIntent. */ public UnaryCallSettings.Builder<DeleteIntentRequest, Empty> deleteIntentSettings() { return deleteIntentSettings; } /** Returns the builder for the settings used for calls to batchUpdateIntents. */ public UnaryCallSettings.Builder<BatchUpdateIntentsRequest, Operation> batchUpdateIntentsSettings() { return batchUpdateIntentsSettings; } /** Returns the builder for the settings used for calls to batchUpdateIntents. */ public OperationCallSettings.Builder< BatchUpdateIntentsRequest, BatchUpdateIntentsResponse, Struct> batchUpdateIntentsOperationSettings() { return batchUpdateIntentsOperationSettings; } /** Returns the builder for the settings used for calls to batchDeleteIntents. 
*/ public UnaryCallSettings.Builder<BatchDeleteIntentsRequest, Operation> batchDeleteIntentsSettings() { return batchDeleteIntentsSettings; } /** Returns the builder for the settings used for calls to batchDeleteIntents. */ public OperationCallSettings.Builder<BatchDeleteIntentsRequest, Empty, Struct> batchDeleteIntentsOperationSettings() { return batchDeleteIntentsOperationSettings; } /** Returns the builder for the settings used for calls to listLocations. */ public PagedCallSettings.Builder< ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse> listLocationsSettings() { return listLocationsSettings; } /** Returns the builder for the settings used for calls to getLocation. */ public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() { return getLocationSettings; } @Override public IntentsStubSettings build() throws IOException { return new IntentsStubSettings(this); } } }
apache/hbase
35,439
hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/FanOutOneBlockAsyncDFSOutputSaslHelper.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.io.asyncfs; import static org.apache.hadoop.hbase.util.NettyFutureUtils.safeWrite; import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY; import static org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleState.READER_IDLE; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.security.GeneralSecurityException; import java.util.Arrays; import java.util.Base64; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import javax.security.auth.callback.Callback; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.callback.NameCallback; import javax.security.auth.callback.PasswordCallback; import javax.security.auth.callback.UnsupportedCallbackException; import javax.security.sasl.RealmCallback; import 
javax.security.sasl.RealmChoiceCallback; import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.CipherOption; import org.apache.hadoop.crypto.CipherSuite; import org.apache.hadoop.crypto.CryptoCodec; import org.apache.hadoop.crypto.Decryptor; import org.apache.hadoop.crypto.Encryptor; import org.apache.hadoop.crypto.key.KeyProvider; import org.apache.hadoop.crypto.key.KeyProvider.KeyVersion; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.datatransfer.InvalidEncryptionKeyException; import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto; import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.DataTransferEncryptorStatus; import org.apache.hadoop.hdfs.protocolPB.PBHelperClient; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.security.SaslPropertiesResolver; import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.base.Throwables; import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet; import org.apache.hbase.thirdparty.com.google.common.collect.Maps; import 
org.apache.hbase.thirdparty.com.google.protobuf.CodedOutputStream; import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf; import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufOutputStream; import org.apache.hbase.thirdparty.io.netty.buffer.CompositeByteBuf; import org.apache.hbase.thirdparty.io.netty.buffer.Unpooled; import org.apache.hbase.thirdparty.io.netty.channel.Channel; import org.apache.hbase.thirdparty.io.netty.channel.ChannelDuplexHandler; import org.apache.hbase.thirdparty.io.netty.channel.ChannelHandlerContext; import org.apache.hbase.thirdparty.io.netty.channel.ChannelOutboundHandlerAdapter; import org.apache.hbase.thirdparty.io.netty.channel.ChannelPipeline; import org.apache.hbase.thirdparty.io.netty.channel.ChannelPromise; import org.apache.hbase.thirdparty.io.netty.channel.SimpleChannelInboundHandler; import org.apache.hbase.thirdparty.io.netty.handler.codec.LengthFieldBasedFrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.codec.MessageToByteEncoder; import org.apache.hbase.thirdparty.io.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateEvent; import org.apache.hbase.thirdparty.io.netty.handler.timeout.IdleStateHandler; import org.apache.hbase.thirdparty.io.netty.util.concurrent.Promise; /** * Helper class for adding sasl support for {@link FanOutOneBlockAsyncDFSOutput}. 
*/
@InterfaceAudience.Private
public final class FanOutOneBlockAsyncDFSOutputSaslHelper {
  private static final Logger LOG =
    LoggerFactory.getLogger(FanOutOneBlockAsyncDFSOutputSaslHelper.class);

  // Static utility class; never instantiated.
  private FanOutOneBlockAsyncDFSOutputSaslHelper() {
  }

  // Fixed SASL server name used for the HDFS data transfer handshake.
  private static final String SERVER_NAME = "0";
  // SASL protocol name used for the HDFS data transfer handshake.
  private static final String PROTOCOL = "hdfs";
  // SASL mechanism name of Hadoop's TOKEN auth method.
  private static final String MECHANISM =
    org.apache.hadoop.security.SaslRpcServer.AuthMethod.TOKEN.getMechanismName();
  // Magic number written first on the wire to signal that a SASL negotiation follows.
  private static final int SASL_TRANSFER_MAGIC_NUMBER = 0xDEADBEEF;
  // Delimiter used when composing the SASL user name from the data encryption key parts.
  private static final String NAME_DELIMITER = " ";

  // Reflective accessors for private fields of SaslDataTransferClient.
  private interface SaslAdaptor {

    TrustedChannelResolver getTrustedChannelResolver(SaslDataTransferClient saslClient);

    SaslPropertiesResolver getSaslPropsResolver(SaslDataTransferClient saslClient);

    AtomicBoolean getFallbackToSimpleAuth(SaslDataTransferClient saslClient);
  }

  private static final SaslAdaptor SASL_ADAPTOR;

  // Abstracts the Hadoop-version-dependent way of decrypting an encrypted data
  // encryption key (EDEK) for files in HDFS transparent encryption zones.
  private interface TransparentCryptoHelper {

    Encryptor createEncryptor(Configuration conf, FileEncryptionInfo feInfo, DFSClient client)
      throws IOException;
  }

  private static final TransparentCryptoHelper TRANSPARENT_CRYPTO_HELPER;

  /**
   * Builds a SaslAdaptor that reads SaslDataTransferClient's private resolver
   * fields via reflection. Throws if the expected fields are absent, which aborts
   * class initialization (see the static block below).
   */
  private static SaslAdaptor createSaslAdaptor()
      throws NoSuchFieldException, NoSuchMethodException {
    Field saslPropsResolverField =
      SaslDataTransferClient.class.getDeclaredField("saslPropsResolver");
    saslPropsResolverField.setAccessible(true);
    Field trustedChannelResolverField =
      SaslDataTransferClient.class.getDeclaredField("trustedChannelResolver");
    trustedChannelResolverField.setAccessible(true);
    Field fallbackToSimpleAuthField =
      SaslDataTransferClient.class.getDeclaredField("fallbackToSimpleAuth");
    fallbackToSimpleAuthField.setAccessible(true);
    return new SaslAdaptor() {

      @Override
      public TrustedChannelResolver getTrustedChannelResolver(SaslDataTransferClient saslClient) {
        try {
          return (TrustedChannelResolver) trustedChannelResolverField.get(saslClient);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }

      @Override
      public SaslPropertiesResolver getSaslPropsResolver(SaslDataTransferClient saslClient) {
        try {
          return (SaslPropertiesResolver) saslPropsResolverField.get(saslClient);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }

      @Override
      public AtomicBoolean getFallbackToSimpleAuth(SaslDataTransferClient saslClient) {
        try {
          return (AtomicBoolean) fallbackToSimpleAuthField.get(saslClient);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }
    };
  }

  /**
   * EDEK decryption for Hadoop versions before HDFS-12396, where the (private)
   * decrypt method lives on DFSClient itself.
   */
  private static TransparentCryptoHelper createTransparentCryptoHelperWithoutHDFS12396()
      throws NoSuchMethodException {
    Method decryptEncryptedDataEncryptionKeyMethod = DFSClient.class
      .getDeclaredMethod("decryptEncryptedDataEncryptionKey", FileEncryptionInfo.class);
    decryptEncryptedDataEncryptionKeyMethod.setAccessible(true);
    return new TransparentCryptoHelper() {

      @Override
      public Encryptor createEncryptor(Configuration conf, FileEncryptionInfo feInfo,
        DFSClient client) throws IOException {
        try {
          KeyVersion decryptedKey =
            (KeyVersion) decryptEncryptedDataEncryptionKeyMethod.invoke(client, feInfo);
          CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf, feInfo.getCipherSuite());
          Encryptor encryptor = cryptoCodec.createEncryptor();
          encryptor.init(decryptedKey.getMaterial(), feInfo.getIV());
          return encryptor;
        } catch (InvocationTargetException e) {
          // Unwrap the reflective wrapper; rethrow IOExceptions as-is.
          Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
          throw new RuntimeException(e.getTargetException());
        } catch (GeneralSecurityException e) {
          throw new IOException(e);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }
    };
  }

  /**
   * EDEK decryption for Hadoop versions with HDFS-12396, where the decrypt method
   * is a static member of org.apache.hadoop.hdfs.HdfsKMSUtil.
   */
  private static TransparentCryptoHelper createTransparentCryptoHelperWithHDFS12396()
      throws ClassNotFoundException, NoSuchMethodException {
    Class<?> hdfsKMSUtilCls = Class.forName("org.apache.hadoop.hdfs.HdfsKMSUtil");
    Method decryptEncryptedDataEncryptionKeyMethod = hdfsKMSUtilCls.getDeclaredMethod(
      "decryptEncryptedDataEncryptionKey", FileEncryptionInfo.class, KeyProvider.class);
    decryptEncryptedDataEncryptionKeyMethod.setAccessible(true);
    return new TransparentCryptoHelper() {

      @Override
      public Encryptor createEncryptor(Configuration conf, FileEncryptionInfo feInfo,
        DFSClient client) throws IOException {
        try {
          // Static method: receiver is null, key provider comes from the client.
          KeyVersion decryptedKey = (KeyVersion) decryptEncryptedDataEncryptionKeyMethod
            .invoke(null, feInfo, client.getKeyProvider());
          CryptoCodec cryptoCodec = CryptoCodec.getInstance(conf, feInfo.getCipherSuite());
          Encryptor encryptor = cryptoCodec.createEncryptor();
          encryptor.init(decryptedKey.getMaterial(), feInfo.getIV());
          return encryptor;
        } catch (InvocationTargetException e) {
          Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
          throw new RuntimeException(e.getTargetException());
        } catch (GeneralSecurityException e) {
          throw new IOException(e);
        } catch (IllegalAccessException e) {
          throw new RuntimeException(e);
        }
      }
    };
  }

  /**
   * Picks the EDEK decryption strategy matching the Hadoop version on the classpath:
   * try the pre-HDFS-12396 shape first and fall back to the HdfsKMSUtil variant.
   */
  private static TransparentCryptoHelper createTransparentCryptoHelper()
      throws NoSuchMethodException, ClassNotFoundException {
    try {
      return createTransparentCryptoHelperWithoutHDFS12396();
    } catch (NoSuchMethodException e) {
      LOG.debug("No decryptEncryptedDataEncryptionKey method in DFSClient,"
        + " should be hadoop version with HDFS-12396", e);
    }
    return createTransparentCryptoHelperWithHDFS12396();
  }

  static {
    try {
      SASL_ADAPTOR = createSaslAdaptor();
      TRANSPARENT_CRYPTO_HELPER = createTransparentCryptoHelper();
    } catch (Exception e) {
      // Reflection against HDFS internals failed; this whole helper is unusable,
      // so fail class loading loudly rather than limp along.
      String msg = "Couldn't properly initialize access to HDFS internals. Please "
        + "update your WAL Provider to not make use of the 'asyncfs' provider. See "
        + "HBASE-16110 for more information.";
      LOG.error(msg, e);
      throw new Error(msg, e);
    }
  }

  /**
   * Sets user name and password when asked by the client-side SASL object.
   */
  private static final class SaslClientCallbackHandler implements CallbackHandler {

    private final char[] password;
    private final String userName;

    /**
     * Creates a new SaslClientCallbackHandler.
     * @param userName SASL user name
     * @param password SASL password
     */
    public SaslClientCallbackHandler(String userName, char[] password) {
      this.password = password;
      this.userName = userName;
    }

    @Override
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
      NameCallback nc = null;
      PasswordCallback pc = null;
      RealmCallback rc = null;
      for (Callback callback : callbacks) {
        if (callback instanceof RealmChoiceCallback) {
          // Realm choice is deliberately ignored; realm text is echoed back below.
          continue;
        } else if (callback instanceof NameCallback) {
          nc = (NameCallback) callback;
        } else if (callback instanceof PasswordCallback) {
          pc = (PasswordCallback) callback;
        } else if (callback instanceof RealmCallback) {
          rc = (RealmCallback) callback;
        } else {
          throw new UnsupportedCallbackException(callback, "Unrecognized SASL client callback");
        }
      }
      if (nc != null) {
        nc.setName(userName);
      }
      if (pc != null) {
        pc.setPassword(password);
      }
      if (rc != null) {
        rc.setText(rc.getDefaultText());
      }
    }
  }

  /**
   * Netty duplex handler that drives the client side of the SASL data transfer
   * negotiation and, on success, installs the agreed encryption/wrapping handlers
   * into the pipeline before completing {@code promise}.
   */
  private static final class SaslNegotiateHandler extends ChannelDuplexHandler {

    private final Configuration conf;

    private final Map<String, String> saslProps;

    private final SaslClient saslClient;

    private final int timeoutMs;

    private final Promise<Void> promise;

    private final DFSClient dfsClient;

    // Negotiation step counter: 0 before the initial message is sent, then
    // incremented once per exchange (see handlerAdded/channelRead).
    private int step = 0;

    public SaslNegotiateHandler(Configuration conf, String username, char[] password,
      Map<String, String> saslProps, int timeoutMs, Promise<Void> promise,
      DFSClient dfsClient) throws SaslException {
      this.conf = conf;
      this.saslProps = saslProps;
      this.saslClient = Sasl.createSaslClient(new String[] { MECHANISM }, username, PROTOCOL,
        SERVER_NAME, saslProps, new SaslClientCallbackHandler(username, password));
      this.timeoutMs = timeoutMs;
      this.promise = promise;
      this.dfsClient = dfsClient;
    }

    private void sendSaslMessage(ChannelHandlerContext ctx, byte[] payload) throws IOException {
      sendSaslMessage(ctx, payload, null);
    }

    private List<CipherOption> getCipherOptions() throws IOException {
      // Negotiate cipher suites if configured. Currently, the only supported
      // cipher suite is AES/CTR/NoPadding, but the protocol allows multiple
      // values for future expansion.
      String cipherSuites = conf.get(DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY);
      if (StringUtils.isBlank(cipherSuites)) {
        return null;
      }
      if (!cipherSuites.equals(CipherSuite.AES_CTR_NOPADDING.getName())) {
        throw new IOException(String.format("Invalid cipher suite, %s=%s",
          DFS_ENCRYPT_DATA_TRANSFER_CIPHER_SUITES_KEY, cipherSuites));
      }
      return Collections.singletonList(new CipherOption(CipherSuite.AES_CTR_NOPADDING));
    }

    /**
     * The asyncfs subsystem emulates a HDFS client by sending protobuf messages via netty. After
     * Hadoop 3.3.0, the protobuf classes are relocated to org.apache.hadoop.thirdparty.protobuf.*.
     * Use Reflection to check which ones to use.
     */
    private static class BuilderPayloadSetter {
      // setPayload(ByteString) on the message builder, resolved reflectively.
      private static Method setPayloadMethod;
      // Package-private LiteralByteString(byte[]) constructor, resolved reflectively.
      private static Constructor<?> constructor;

      /**
       * Create a ByteString from byte array without copying (wrap), and then set it as the payload
       * for the builder.
       * @param builder builder for HDFS DataTransferEncryptorMessage.
       * @param payload byte array of payload.
       */
      static void wrapAndSetPayload(DataTransferEncryptorMessageProto.Builder builder,
        byte[] payload) throws IOException {
        Object byteStringObject;
        try {
          // byteStringObject = new LiteralByteString(payload);
          byteStringObject = constructor.newInstance(payload);
          // builder.setPayload(byteStringObject);
          setPayloadMethod.invoke(builder, constructor.getDeclaringClass().cast(byteStringObject));
        } catch (IllegalAccessException | InstantiationException e) {
          throw new RuntimeException(e);
        } catch (InvocationTargetException e) {
          Throwables.propagateIfPossible(e.getTargetException(), IOException.class);
          throw new RuntimeException(e.getTargetException());
        }
      }

      static {
        Class<?> builderClass = DataTransferEncryptorMessageProto.Builder.class;
        // Try the unrelocated ByteString
        Class<?> byteStringClass;
        try {
          // See if it can load the relocated ByteString, which comes from hadoop-thirdparty.
          byteStringClass = Class.forName("org.apache.hadoop.thirdparty.protobuf.ByteString");
          LOG.debug("Found relocated ByteString class from hadoop-thirdparty."
            + " Assuming this is Hadoop 3.3.0+.");
        } catch (ClassNotFoundException e) {
          LOG.debug("Did not find relocated ByteString class from hadoop-thirdparty."
            + " Assuming this is below Hadoop 3.3.0", e);
          try {
            byteStringClass = Class.forName("com.google.protobuf.ByteString");
            LOG.debug("com.google.protobuf.ByteString found.");
          } catch (ClassNotFoundException ex) {
            throw new RuntimeException(ex);
          }
        }

        // LiteralByteString is a package private class in protobuf. Make it accessible.
        Class<?> literalByteStringClass;
        try {
          literalByteStringClass =
            Class.forName("org.apache.hadoop.thirdparty.protobuf.ByteString$LiteralByteString");
          LOG.debug("Shaded LiteralByteString from hadoop-thirdparty is found.");
        } catch (ClassNotFoundException e) {
          try {
            literalByteStringClass = Class.forName("com.google.protobuf.LiteralByteString");
            LOG.debug("com.google.protobuf.LiteralByteString found.");
          } catch (ClassNotFoundException ex) {
            throw new RuntimeException(ex);
          }
        }

        try {
          constructor = literalByteStringClass.getDeclaredConstructor(byte[].class);
          constructor.setAccessible(true);
        } catch (NoSuchMethodException e) {
          throw new RuntimeException(e);
        }

        try {
          setPayloadMethod = builderClass.getMethod("setPayload", byteStringClass);
        } catch (NoSuchMethodException e) {
          // if either method is not found, we are in big trouble. Abort.
          throw new RuntimeException(e);
        }
      }
    }

    // Builds a DataTransferEncryptorMessageProto (status SUCCESS) with the given
    // payload and optional cipher options, and writes it varint32-delimited.
    private void sendSaslMessage(ChannelHandlerContext ctx, byte[] payload,
      List<CipherOption> options) throws IOException {
      DataTransferEncryptorMessageProto.Builder builder =
        DataTransferEncryptorMessageProto.newBuilder();
      builder.setStatus(DataTransferEncryptorStatus.SUCCESS);
      if (payload != null) {
        BuilderPayloadSetter.wrapAndSetPayload(builder, payload);
      }
      if (options != null) {
        builder.addAllCipherOption(PBHelperClient.convertCipherOptions(options));
      }
      DataTransferEncryptorMessageProto proto = builder.build();
      int size = proto.getSerializedSize();
      size += CodedOutputStream.computeUInt32SizeNoTag(size);
      ByteBuf buf = ctx.alloc().buffer(size);
      proto.writeDelimitedTo(new ByteBufOutputStream(buf));
      safeWrite(ctx, buf);
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
      // Announce SASL negotiation with the magic number, then send the initial
      // response (empty unless the mechanism has one).
      safeWrite(ctx, ctx.alloc().buffer(4).writeInt(SASL_TRANSFER_MAGIC_NUMBER));
      byte[] firstMessage = new byte[0];
      if (saslClient.hasInitialResponse()) {
        firstMessage = saslClient.evaluateChallenge(firstMessage);
      }
      sendSaslMessage(ctx, firstMessage);
      ctx.flush();
      step++;
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
      saslClient.dispose();
    }

    private void check(DataTransferEncryptorMessageProto proto) throws IOException {
      if (proto.getStatus() == DataTransferEncryptorStatus.ERROR_UNKNOWN_KEY) {
        // Stale encryption key: drop the cached key so the next attempt fetches a fresh one.
        dfsClient.clearDataEncryptionKey();
        throw new InvalidEncryptionKeyException(proto.getMessage());
      } else if (proto.getStatus() == DataTransferEncryptorStatus.ERROR) {
        throw new IOException(proto.getMessage());
      }
    }

    private String getNegotiatedQop() {
      return (String) saslClient.getNegotiatedProperty(Sasl.QOP);
    }

    private boolean isNegotiatedQopPrivacy() {
      String qop = getNegotiatedQop();
      return qop != null && "auth-conf".equalsIgnoreCase(qop);
    }

    private boolean requestedQopContainsPrivacy() {
      Set<String> requestedQop =
        ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
      return requestedQop.contains("auth-conf");
    }

    private void checkSaslComplete() throws IOException {
      if (!saslClient.isComplete()) {
        throw new IOException("Failed to complete SASL handshake");
      }
      Set<String> requestedQop =
        ImmutableSet.copyOf(Arrays.asList(saslProps.get(Sasl.QOP).split(",")));
      String negotiatedQop = getNegotiatedQop();
      // Treat null negotiated QOP as "auth" for the purpose of verification
      // Code elsewhere does the same implicitly
      if (negotiatedQop == null) {
        negotiatedQop = "auth";
      }
      LOG.debug(
        "Verifying QOP, requested QOP = " + requestedQop + ", negotiated QOP = " + negotiatedQop);
      if (!requestedQop.contains(negotiatedQop)) {
        throw new IOException(String.format("SASL handshake completed, but "
          + "channel does not have acceptable quality of protection, "
          + "requested = %s, negotiated(effective) = %s", requestedQop, negotiatedQop));
      }
    }

    // True when the negotiated QOP requires SASL wrap/unwrap (integrity or privacy).
    private boolean useWrap() {
      String qop = (String) saslClient.getNegotiatedProperty(Sasl.QOP);
      return qop != null && !"auth".equalsIgnoreCase(qop);
    }

    // Unwraps the cipher keys that the server wrapped with the SASL layer.
    private CipherOption unwrap(CipherOption option, SaslClient saslClient) throws IOException {
      byte[] inKey = option.getInKey();
      if (inKey != null) {
        inKey = saslClient.unwrap(inKey, 0, inKey.length);
      }
      byte[] outKey = option.getOutKey();
      if (outKey != null) {
        outKey = saslClient.unwrap(outKey, 0, outKey.length);
      }
      return new CipherOption(option.getCipherSuite(), inKey, option.getInIv(), outKey,
        option.getOutIv());
    }

    private CipherOption getCipherOption(DataTransferEncryptorMessageProto proto,
      boolean isNegotiatedQopPrivacy, SaslClient saslClient) throws IOException {
      List<CipherOption> cipherOptions =
        PBHelperClient.convertCipherOptionProtos(proto.getCipherOptionList());
      if (cipherOptions == null || cipherOptions.isEmpty()) {
        return null;
      }
      CipherOption cipherOption = cipherOptions.get(0);
      return isNegotiatedQopPrivacy ? unwrap(cipherOption, saslClient) : cipherOption;
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
      if (msg instanceof DataTransferEncryptorMessageProto) {
        DataTransferEncryptorMessageProto proto = (DataTransferEncryptorMessageProto) msg;
        check(proto);
        byte[] challenge = proto.getPayload().toByteArray();
        byte[] response = saslClient.evaluateChallenge(challenge);
        switch (step) {
          case 1: {
            // Second message of the exchange: send our response, plus cipher
            // options if we asked for privacy.
            List<CipherOption> cipherOptions = null;
            if (requestedQopContainsPrivacy()) {
              cipherOptions = getCipherOptions();
            }
            sendSaslMessage(ctx, response, cipherOptions);
            ctx.flush();
            step++;
            break;
          }
          case 2: {
            // Final message: handshake must be complete; rebuild the pipeline
            // with the negotiated protection, then fulfill the promise.
            assert response == null;
            checkSaslComplete();
            CipherOption cipherOption =
              getCipherOption(proto, isNegotiatedQopPrivacy(), saslClient);
            ChannelPipeline p = ctx.pipeline();
            while (p.first() != null) {
              p.removeFirst();
            }
            if (cipherOption != null) {
              CryptoCodec codec = CryptoCodec.getInstance(conf, cipherOption.getCipherSuite());
              p.addLast(new EncryptHandler(codec, cipherOption.getInKey(), cipherOption.getInIv()),
                new DecryptHandler(codec, cipherOption.getOutKey(), cipherOption.getOutIv()));
            } else {
              if (useWrap()) {
                p.addLast(new SaslWrapHandler(saslClient),
                  new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4),
                  new SaslUnwrapHandler(saslClient));
              }
            }
            promise.trySuccess(null);
            break;
          }
          default:
            throw new IllegalArgumentException("Unrecognized negotiation step: " + step);
        }
      } else {
        ctx.fireChannelRead(msg);
      }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
      promise.tryFailure(cause);
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws Exception {
      if (evt instanceof IdleStateEvent && ((IdleStateEvent) evt).state() == READER_IDLE) {
        promise.tryFailure(new IOException("Timeout(" + timeoutMs + "ms) waiting for response"));
      } else {
        super.userEventTriggered(ctx, evt);
      }
    }
  }

  /**
   * Inbound handler that strips the 4-byte length prefix and SASL-unwraps each frame.
   */
  private static final class SaslUnwrapHandler extends SimpleChannelInboundHandler<ByteBuf> {

    private final SaslClient saslClient;

    public SaslUnwrapHandler(SaslClient saslClient) {
      this.saslClient = saslClient;
    }

    @Override
    public void channelInactive(ChannelHandlerContext ctx) throws Exception {
      saslClient.dispose();
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
      // Skip the length field added on the wire by the peer's wrap side.
      msg.skipBytes(4);
      byte[] b = new byte[msg.readableBytes()];
      msg.readBytes(b);
      ctx.fireChannelRead(Unpooled.wrappedBuffer(saslClient.unwrap(b, 0, b.length)));
    }
  }

  /**
   * Outbound handler that accumulates writes and, on flush, SASL-wraps the
   * buffered bytes and emits them as a single length-prefixed frame.
   */
  private static final class SaslWrapHandler extends ChannelOutboundHandlerAdapter {

    private final SaslClient saslClient;

    private CompositeByteBuf cBuf;

    public SaslWrapHandler(SaslClient saslClient) {
      this.saslClient = saslClient;
    }

    @Override
    public void handlerAdded(ChannelHandlerContext ctx) throws Exception {
      cBuf = new CompositeByteBuf(ctx.alloc(), false, Integer.MAX_VALUE);
    }

    @Override
    public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise)
      throws Exception {
      if (msg instanceof ByteBuf) {
        ByteBuf buf = (ByteBuf) msg;
        cBuf.addComponent(buf);
        cBuf.writerIndex(cBuf.writerIndex() + buf.readableBytes());
      } else {
        safeWrite(ctx, msg);
      }
    }

    @Override
    public void flush(ChannelHandlerContext ctx) throws Exception {
      if (cBuf.isReadable()) {
        byte[] b = new byte[cBuf.readableBytes()];
        cBuf.readBytes(b);
        cBuf.discardReadComponents();
        byte[] wrapped = saslClient.wrap(b, 0, b.length);
        ByteBuf buf = ctx.alloc().ioBuffer(4 + wrapped.length);
        buf.writeInt(wrapped.length);
        buf.writeBytes(wrapped);
        safeWrite(ctx, buf);
      }
      ctx.flush();
    }

    @Override
    public void handlerRemoved(ChannelHandlerContext ctx) throws Exception {
      // Release buffer on removal.
      cBuf.release();
      cBuf = null;
    }
  }

  /**
   * Inbound handler that decrypts incoming data with the negotiated cipher.
   */
  private static final class DecryptHandler extends SimpleChannelInboundHandler<ByteBuf> {

    private final Decryptor decryptor;

    public DecryptHandler(CryptoCodec codec, byte[] key, byte[] iv)
      throws GeneralSecurityException, IOException {
      this.decryptor = codec.createDecryptor();
      // Copy the IV since init may be called with a buffer we do not own.
      this.decryptor.init(key, Arrays.copyOf(iv, iv.length));
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg) throws Exception {
      ByteBuf inBuf;
      boolean release = false;
      // The decryptor needs a single contiguous nio buffer; copy if composite.
      if (msg.nioBufferCount() == 1) {
        inBuf = msg;
      } else {
        inBuf = ctx.alloc().directBuffer(msg.readableBytes());
        msg.readBytes(inBuf);
        release = true;
      }
      ByteBuffer inBuffer = inBuf.nioBuffer();
      ByteBuf outBuf = ctx.alloc().directBuffer(inBuf.readableBytes());
      ByteBuffer outBuffer = outBuf.nioBuffer(0, inBuf.readableBytes());
      decryptor.decrypt(inBuffer, outBuffer);
      outBuf.writerIndex(inBuf.readableBytes());
      if (release) {
        inBuf.release();
      }
      ctx.fireChannelRead(outBuf);
    }
  }

  /**
   * Outbound handler that encrypts outgoing data with the negotiated cipher.
   */
  private static final class EncryptHandler extends MessageToByteEncoder<ByteBuf> {

    private final Encryptor encryptor;

    public EncryptHandler(CryptoCodec codec, byte[] key, byte[] iv)
      throws GeneralSecurityException, IOException {
      this.encryptor = codec.createEncryptor();
      this.encryptor.init(key, Arrays.copyOf(iv, iv.length));
    }

    @Override
    protected ByteBuf allocateBuffer(ChannelHandlerContext ctx, ByteBuf msg, boolean preferDirect)
      throws Exception {
      if (preferDirect) {
        return ctx.alloc().directBuffer(msg.readableBytes());
      } else {
        return ctx.alloc().buffer(msg.readableBytes());
      }
    }

    @Override
    protected void encode(ChannelHandlerContext ctx, ByteBuf msg, ByteBuf out) throws Exception {
      ByteBuf inBuf;
      boolean release = false;
      // The encryptor needs a single contiguous nio buffer; copy if composite.
      if (msg.nioBufferCount() == 1) {
        inBuf = msg;
      } else {
        inBuf = ctx.alloc().directBuffer(msg.readableBytes());
        msg.readBytes(inBuf);
        release = true;
      }
      ByteBuffer inBuffer = inBuf.nioBuffer();
      ByteBuffer outBuffer = out.nioBuffer(0, inBuf.readableBytes());
      encryptor.encrypt(inBuffer, outBuffer);
      out.writerIndex(inBuf.readableBytes());
      if (release) {
        inBuf.release();
      }
    }
  }

  // SASL user name for encrypted handshakes: "<keyId> <blockPoolId> <base64(nonce)>".
  private static String getUserNameFromEncryptionKey(DataEncryptionKey encryptionKey) {
    return encryptionKey.keyId + NAME_DELIMITER + encryptionKey.blockPoolId + NAME_DELIMITER
      + Base64.getEncoder().encodeToString(encryptionKey.nonce);
  }

  private static char[] encryptionKeyToPassword(byte[] encryptionKey) {
    return Base64.getEncoder().encodeToString(encryptionKey).toCharArray();
  }

  // SASL user name for token-based handshakes: base64 of the block token identifier.
  private static String buildUsername(Token<BlockTokenIdentifier> blockToken) {
    return Base64.getEncoder().encodeToString(blockToken.getIdentifier());
  }

  private static char[] buildClientPassword(Token<BlockTokenIdentifier> blockToken) {
    return Base64.getEncoder().encodeToString(blockToken.getPassword()).toCharArray();
  }

  // SASL properties requesting privacy QOP and the given cipher for encrypted transfer.
  private static Map<String, String> createSaslPropertiesForEncryption(
    String encryptionAlgorithm) {
    Map<String, String> saslProps = Maps.newHashMapWithExpectedSize(3);
    saslProps.put(Sasl.QOP, QualityOfProtection.PRIVACY.getSaslQop());
    saslProps.put(Sasl.SERVER_AUTH, "true");
    saslProps.put("com.sun.security.sasl.digest.cipher", encryptionAlgorithm);
    return saslProps;
  }

  // Installs the negotiation handlers (idle timeout, varint framing, protobuf
  // decode, SASL state machine) on the channel; completes saslPromise on failure.
  private static void doSaslNegotiation(Configuration conf, Channel channel, int timeoutMs,
    String username, char[] password, Map<String, String> saslProps, Promise<Void> saslPromise,
    DFSClient dfsClient) {
    try {
      channel.pipeline().addLast(new IdleStateHandler(timeoutMs, 0, 0, TimeUnit.MILLISECONDS),
        new ProtobufVarint32FrameDecoder(),
        new ProtobufDecoder(DataTransferEncryptorMessageProto.getDefaultInstance()),
        new SaslNegotiateHandler(conf, username, password, saslProps, timeoutMs, saslPromise,
          dfsClient));
    } catch (SaslException e) {
      saslPromise.tryFailure(e);
    }
  }

  /**
   * Decides whether and how to run the SASL handshake with the given datanode and
   * completes {@code saslPromise} accordingly. Mirrors the branch structure of
   * Hadoop's SaslDataTransferClient: trusted channel / encrypted handshake /
   * unsecured cluster / privileged port / fallback-to-simple / general handshake.
   */
  static void trySaslNegotiate(Configuration conf, Channel channel, DatanodeInfo dnInfo,
    int timeoutMs, DFSClient client, Token<BlockTokenIdentifier> accessToken,
    Promise<Void> saslPromise) throws IOException {
    SaslDataTransferClient saslClient = client.getSaslDataTransferClient();
    SaslPropertiesResolver saslPropsResolver = SASL_ADAPTOR.getSaslPropsResolver(saslClient);
    TrustedChannelResolver trustedChannelResolver =
      SASL_ADAPTOR.getTrustedChannelResolver(saslClient);
    AtomicBoolean fallbackToSimpleAuth = SASL_ADAPTOR.getFallbackToSimpleAuth(saslClient);
    InetAddress addr = ((InetSocketAddress) channel.remoteAddress()).getAddress();
    if (trustedChannelResolver.isTrusted() || trustedChannelResolver.isTrusted(addr)) {
      saslPromise.trySuccess(null);
      return;
    }
    DataEncryptionKey encryptionKey = client.newDataEncryptionKey();
    if (encryptionKey != null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(
          "SASL client doing encrypted handshake for addr = " + addr + ", datanodeId = " + dnInfo);
      }
      doSaslNegotiation(conf, channel, timeoutMs, getUserNameFromEncryptionKey(encryptionKey),
        encryptionKeyToPassword(encryptionKey.encryptionKey),
        createSaslPropertiesForEncryption(encryptionKey.encryptionAlgorithm), saslPromise,
        client);
    } else if (!UserGroupInformation.isSecurityEnabled()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("SASL client skipping handshake in unsecured configuration for addr = " + addr
          + ", datanodeId = " + dnInfo);
      }
      saslPromise.trySuccess(null);
    } else if (dnInfo.getXferPort() < 1024) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("SASL client skipping handshake in secured configuration with "
          + "privileged port for addr = " + addr + ", datanodeId = " + dnInfo);
      }
      saslPromise.trySuccess(null);
    } else if (fallbackToSimpleAuth != null && fallbackToSimpleAuth.get()) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("SASL client skipping handshake in secured configuration with "
          + "unsecured cluster for addr = " + addr + ", datanodeId = " + dnInfo);
      }
      saslPromise.trySuccess(null);
    } else if (saslPropsResolver != null) {
      if (LOG.isDebugEnabled()) {
        LOG.debug(
          "SASL client doing general handshake for addr = " + addr + ", datanodeId = " + dnInfo);
      }
      doSaslNegotiation(conf, channel, timeoutMs, buildUsername(accessToken),
        buildClientPassword(accessToken), saslPropsResolver.getClientProperties(addr), saslPromise,
        client);
    } else {
      // It's a secured cluster using non-privileged ports, but no SASL. The only way this can
      // happen is if the DataNode has ignore.secure.ports.for.testing configured, so this is a rare
      // edge case.
      if (LOG.isDebugEnabled()) {
        LOG.debug("SASL client skipping handshake in secured configuration with no SASL "
          + "protection configured for addr = " + addr + ", datanodeId = " + dnInfo);
      }
      saslPromise.trySuccess(null);
    }
  }

  /**
   * Returns an Encryptor for the file's encryption zone, or null when the file
   * carries no FileEncryptionInfo (i.e. transparent encryption not in play).
   */
  static Encryptor createEncryptor(Configuration conf, HdfsFileStatus stat, DFSClient client)
    throws IOException {
    FileEncryptionInfo feInfo = stat.getFileEncryptionInfo();
    if (feInfo == null) {
      return null;
    }
    return TRANSPARENT_CRYPTO_HELPER.createEncryptor(conf, feInfo, client);
  }
}
apache/httpcomponents-core
35,476
httpcore5/src/main/java/org/apache/hc/core5/reactor/ssl/SSLIOSession.java
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.hc.core5.reactor.ssl; import java.io.IOException; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ByteChannel; import java.nio.channels.CancelledKeyException; import java.nio.channels.ClosedChannelException; import java.nio.channels.SelectionKey; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.Lock; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLEngineResult; import javax.net.ssl.SSLEngineResult.HandshakeStatus; import javax.net.ssl.SSLException; import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.SSLSession; import org.apache.hc.core5.annotation.Contract; import org.apache.hc.core5.annotation.Internal; import org.apache.hc.core5.annotation.ThreadingBehavior; import org.apache.hc.core5.concurrent.FutureCallback; import org.apache.hc.core5.function.Callback; import org.apache.hc.core5.io.CloseMode; import org.apache.hc.core5.io.SocketTimeoutExceptionFactory; import org.apache.hc.core5.net.NamedEndpoint; import org.apache.hc.core5.reactor.Command; import org.apache.hc.core5.reactor.EventMask; import org.apache.hc.core5.reactor.IOEventHandler; import org.apache.hc.core5.reactor.IOSession; import org.apache.hc.core5.util.Args; import org.apache.hc.core5.util.Asserts; import org.apache.hc.core5.util.Timeout; /** * {@code SSLIOSession} is a decorator class intended to transparently extend * an {@link IOSession} with transport layer security capabilities based on * the SSL/TLS protocol. 
* * @since 4.2 */ @Contract(threading = ThreadingBehavior.SAFE_CONDITIONAL) @Internal public class SSLIOSession implements IOSession { enum TLSHandShakeState { READY, INITIALIZED, HANDSHAKING, COMPLETE } private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0); private final NamedEndpoint targetEndpoint; private final IOSession session; private final SSLEngine sslEngine; private final SSLManagedBuffer inEncrypted; private final SSLManagedBuffer outEncrypted; private final SSLManagedBuffer inPlain; private final SSLSessionInitializer initializer; private final SSLSessionVerifier verifier; private final Callback<SSLIOSession> sessionStartCallback; private final Callback<SSLIOSession> sessionEndCallback; private final AtomicReference<FutureCallback<SSLSession>> handshakeCallbackRef; private final Timeout handshakeTimeout; private final SSLMode sslMode; private final AtomicInteger outboundClosedCount; private final AtomicReference<TLSHandShakeState> handshakeStateRef; private final IOEventHandler internalEventHandler; private final int packetBufferSize; private int appEventMask; private volatile boolean endOfStream; private volatile Status status = Status.ACTIVE; private volatile Timeout socketTimeout; private volatile TlsDetails tlsDetails; private volatile boolean appClosed; /** * Creates new instance of {@code SSLIOSession} class. * * @param session I/O session to be decorated with the TLS/SSL capabilities. * @param sslMode SSL mode (client or server) * @param targetEndpoint target endpoint (applicable in client mode only). May be {@code null}. * @param sslContext SSL context to use for this I/O session. * @param sslBufferMode buffer management mode * @param initializer optional SSL session initializer. May be {@code null}. * @param verifier optional SSL session verifier. May be {@code null}. * @param connectTimeout timeout to apply for the TLS/SSL handshake. May be {@code null}. 
     *
     * @since 5.0
     */
    public SSLIOSession(
            final NamedEndpoint targetEndpoint,
            final IOSession session,
            final SSLMode sslMode,
            final SSLContext sslContext,
            final SSLBufferMode sslBufferMode,
            final SSLSessionInitializer initializer,
            final SSLSessionVerifier verifier,
            final Callback<SSLIOSession> sessionStartCallback,
            final Callback<SSLIOSession> sessionEndCallback,
            final Timeout connectTimeout) {
        // Delegates to the main constructor. Note the argument order differs from
        // this signature: there the timeout precedes the session callbacks, and the
        // trailing argument is the (absent) handshake result callback.
        this(targetEndpoint, session, sslMode, sslContext, sslBufferMode, initializer, verifier,
                connectTimeout, sessionStartCallback, sessionEndCallback, null);
    }

    /**
     * Creates new instance of {@code SSLIOSession} class.
     *
     * @param session I/O session to be decorated with the TLS/SSL capabilities.
     * @param sslMode SSL mode (client or server)
     * @param targetEndpoint target endpoint (applicable in client mode only). May be {@code null}.
     * @param sslContext SSL context to use for this I/O session.
     * @param sslBufferMode buffer management mode
     * @param initializer optional SSL session initializer. May be {@code null}.
     * @param verifier optional SSL session verifier. May be {@code null}.
     * @param handshakeTimeout timeout to apply for the TLS/SSL handshake. May be {@code null}.
     * @param resultCallback result callback. May be {@code null}.
* * @since 5.2 */ public SSLIOSession( final NamedEndpoint targetEndpoint, final IOSession session, final SSLMode sslMode, final SSLContext sslContext, final SSLBufferMode sslBufferMode, final SSLSessionInitializer initializer, final SSLSessionVerifier verifier, final Timeout handshakeTimeout, final Callback<SSLIOSession> sessionStartCallback, final Callback<SSLIOSession> sessionEndCallback, final FutureCallback<SSLSession> resultCallback) { super(); Args.notNull(session, "IO session"); Args.notNull(sslContext, "SSL context"); this.targetEndpoint = targetEndpoint; this.session = session; this.sslMode = sslMode; this.initializer = initializer; this.verifier = verifier; this.sessionStartCallback = sessionStartCallback; this.sessionEndCallback = sessionEndCallback; this.handshakeCallbackRef = new AtomicReference<>(resultCallback); this.appEventMask = session.getEventMask(); if (this.sslMode == SSLMode.CLIENT && targetEndpoint != null) { this.sslEngine = sslContext.createSSLEngine(targetEndpoint.getHostName(), targetEndpoint.getPort()); } else { this.sslEngine = sslContext.createSSLEngine(); } final SSLSession sslSession = this.sslEngine.getSession(); // Allocate buffers for network (encrypted) data this.packetBufferSize = sslSession.getPacketBufferSize(); this.inEncrypted = SSLManagedBuffer.create(sslBufferMode, packetBufferSize); this.outEncrypted = SSLManagedBuffer.create(sslBufferMode, packetBufferSize); // Allocate buffers for application (unencrypted) data final int appBufferSize = sslSession.getApplicationBufferSize(); this.inPlain = SSLManagedBuffer.create(sslBufferMode, appBufferSize); this.outboundClosedCount = new AtomicInteger(0); this.handshakeStateRef = new AtomicReference<>(TLSHandShakeState.READY); this.handshakeTimeout = handshakeTimeout; this.internalEventHandler = new IOEventHandler() { @Override public void connected(final IOSession protocolSession) throws IOException { beginHandshake(protocolSession); } @Override public void inputReady(final 
IOSession protocolSession, final ByteBuffer src) throws IOException { receiveEncryptedData(); doHandshake(protocolSession); decryptData(protocolSession); updateEventMask(); } @Override public void outputReady(final IOSession protocolSession) throws IOException { encryptData(protocolSession); sendEncryptedData(); doHandshake(protocolSession); updateEventMask(); } @Override public void timeout(final IOSession protocolSession, final Timeout timeout) throws IOException { if (sslEngine.isInboundDone() && !sslEngine.isInboundDone()) { // The session failed to terminate cleanly close(CloseMode.IMMEDIATE); } if (handshakeStateRef.get() != TLSHandShakeState.COMPLETE) { exception(protocolSession, SocketTimeoutExceptionFactory.create(handshakeTimeout)); } else { ensureHandler().timeout(protocolSession, timeout); } } @Override public void exception(final IOSession protocolSession, final Exception cause) { final FutureCallback<SSLSession> resultCallback = handshakeCallbackRef.getAndSet(null); if (resultCallback != null) { resultCallback.failed(cause); } final IOEventHandler handler = session.getHandler(); if (handshakeStateRef.get() != TLSHandShakeState.COMPLETE) { if (cause instanceof SSLHandshakeException) { close(CloseMode.GRACEFUL); } else { session.close(CloseMode.GRACEFUL); close(CloseMode.IMMEDIATE); } } if (handler != null) { handler.exception(protocolSession, cause); } } @Override public void disconnected(final IOSession protocolSession) { final IOEventHandler handler = session.getHandler(); if (handler != null) { handler.disconnected(protocolSession); } } }; } private IOEventHandler ensureHandler() { final IOEventHandler handler = session.getHandler(); Asserts.notNull(handler, "IO event handler"); return handler; } @Override public IOEventHandler getHandler() { return internalEventHandler; } public void beginHandshake(final IOSession protocolSession) throws IOException { if (handshakeStateRef.compareAndSet(TLSHandShakeState.READY, TLSHandShakeState.INITIALIZED)) { 
initialize(protocolSession); } } private void initialize(final IOSession protocolSession) throws IOException { // Save the initial socketTimeout of the underlying IOSession, to be restored after the handshake is finished this.socketTimeout = this.session.getSocketTimeout(); if (handshakeTimeout != null) { this.session.setSocketTimeout(handshakeTimeout); } this.session.getLock().lock(); try { if (this.status.compareTo(Status.CLOSING) >= 0) { return; } switch (this.sslMode) { case CLIENT: this.sslEngine.setUseClientMode(true); break; case SERVER: this.sslEngine.setUseClientMode(false); break; } if (this.initializer != null) { this.initializer.initialize(this.targetEndpoint, this.sslEngine); } this.handshakeStateRef.set(TLSHandShakeState.HANDSHAKING); this.sslEngine.beginHandshake(); this.inEncrypted.release(); this.outEncrypted.release(); doHandshake(protocolSession); updateEventMask(); } finally { this.session.getLock().unlock(); } } // A works-around for exception handling craziness in Sun/Oracle's SSLEngine // implementation. 
// // sun.security.pkcs11.wrapper.PKCS11Exception is re-thrown as // plain RuntimeException in sun.security.ssl.Handshaker#checkThrown private SSLException convert(final RuntimeException ex) { Throwable cause = ex.getCause(); if (cause == null) { cause = ex; } return new SSLException(cause); } private SSLEngineResult doWrap(final ByteBuffer src, final ByteBuffer dst) throws SSLException { try { return this.sslEngine.wrap(src, dst); } catch (final RuntimeException ex) { throw convert(ex); } } private SSLEngineResult doUnwrap(final ByteBuffer src, final ByteBuffer dst) throws SSLException { try { return this.sslEngine.unwrap(src, dst); } catch (final RuntimeException ex) { throw convert(ex); } } private void doRunTask() { final Runnable r = this.sslEngine.getDelegatedTask(); if (r != null) { r.run(); } } private void doHandshake(final IOSession protocolSession) throws IOException { boolean handshaking = true; SSLEngineResult result = null; while (handshaking) { HandshakeStatus handshakeStatus = this.sslEngine.getHandshakeStatus(); // Work-around for what appears to be a bug in Conscrypt SSLEngine that does not // transition into the handshaking state upon #closeOutbound() call but still // has some handshake data stuck in its internal buffer. if (handshakeStatus == HandshakeStatus.NOT_HANDSHAKING && outboundClosedCount.get() > 0) { handshakeStatus = HandshakeStatus.NEED_WRAP; } switch (handshakeStatus) { case NEED_WRAP: // Generate outgoing handshake data this.session.getLock().lock(); try { // Acquire buffers final ByteBuffer outEncryptedBuf = this.outEncrypted.acquire(); // Just wrap an empty buffer because there is no data to write. 
result = doWrap(EMPTY_BUFFER, outEncryptedBuf); if (result.getStatus() != SSLEngineResult.Status.OK || result.getHandshakeStatus() == HandshakeStatus.NEED_WRAP) { handshaking = false; } break; } finally { this.session.getLock().unlock(); } case NEED_UNWRAP: // Process incoming handshake data // Acquire buffers final ByteBuffer inEncryptedBuf = this.inEncrypted.acquire(); final ByteBuffer inPlainBuf = this.inPlain.acquire(); // Perform operations inEncryptedBuf.flip(); try { result = doUnwrap(inEncryptedBuf, inPlainBuf); } finally { inEncryptedBuf.compact(); } try { if (!inEncryptedBuf.hasRemaining() && result.getHandshakeStatus() == HandshakeStatus.NEED_UNWRAP) { throw new SSLException("Input buffer is full"); } } finally { // Release inEncrypted if empty if (inEncryptedBuf.position() == 0) { this.inEncrypted.release(); } } if (this.status.compareTo(Status.CLOSING) >= 0) { this.inPlain.release(); } if (result.getStatus() != SSLEngineResult.Status.OK) { handshaking = false; } break; case NEED_TASK: doRunTask(); break; case NOT_HANDSHAKING: handshaking = false; break; } } // The SSLEngine has just finished handshaking. This value is only generated by a call // to SSLEngine.wrap()/unwrap() when that call finishes a handshake. // It is never generated by SSLEngine.getHandshakeStatus(). if (result != null && result.getHandshakeStatus() == HandshakeStatus.FINISHED) { this.handshakeStateRef.set(TLSHandShakeState.COMPLETE); this.session.setSocketTimeout(this.socketTimeout); if (this.verifier != null) { this.tlsDetails = this.verifier.verify(this.targetEndpoint, this.sslEngine); } String applicationProtocol; if (this.tlsDetails == null) { final SSLSession sslSession = this.sslEngine.getSession(); try { applicationProtocol = this.sslEngine.getApplicationProtocol(); } catch (final UnsupportedOperationException e) { // If the underlying provider does not support the operation, the getApplicationProtocol() method throws an UnsupportedOperationException. 
// In this case, we fall back to "http/1.1" as the application protocol. // This is a workaround to allow older applications that do not support the getApplicationProtocol() method to continue working. // This workaround is temporary and is meant to maintain compatibility with older systems. applicationProtocol = "http/1.1"; } this.tlsDetails = new TlsDetails(sslSession, applicationProtocol); } ensureHandler().connected(protocolSession); if (this.sessionStartCallback != null) { this.sessionStartCallback.execute(this); } final FutureCallback<SSLSession> resultCallback = handshakeCallbackRef.getAndSet(null); if (resultCallback != null) { resultCallback.completed(sslEngine.getSession()); } } } private void updateEventMask() { this.session.getLock().lock(); try { // Graceful session termination if (this.status == Status.ACTIVE && (this.endOfStream || this.sslEngine.isInboundDone())) { this.status = Status.CLOSING; final FutureCallback<SSLSession> resultCallback = handshakeCallbackRef.getAndSet(null); if (resultCallback != null) { resultCallback.failed(new SSLHandshakeException("TLS handshake failed")); } } if (this.status == Status.CLOSING && !this.outEncrypted.hasData()) { this.sslEngine.closeOutbound(); this.outboundClosedCount.incrementAndGet(); } final HandshakeStatus handshakeStatus = this.sslEngine.getHandshakeStatus(); if (this.status == Status.CLOSING && (handshakeStatus == HandshakeStatus.NOT_HANDSHAKING || handshakeStatus == HandshakeStatus.FINISHED) && !this.outEncrypted.hasData() && this.sslEngine.isOutboundDone() && (this.endOfStream || this.sslEngine.isInboundDone()) && appClosed) { this.status = Status.CLOSED; } // Abnormal session termination if (this.status.compareTo(Status.CLOSING) <= 0 && this.endOfStream && handshakeStatus == HandshakeStatus.NEED_UNWRAP) { this.status = Status.CLOSED; } if (this.status == Status.CLOSED) { this.session.close(); if (sessionEndCallback != null) { sessionEndCallback.execute(this); } return; } // Is there a task pending? 
if (handshakeStatus == HandshakeStatus.NEED_TASK) { doRunTask(); } // Need to toggle the event mask for this channel? final int oldMask = this.session.getEventMask(); int newMask = oldMask; switch (this.sslEngine.getHandshakeStatus()) { case NEED_WRAP: newMask = EventMask.READ_WRITE; break; case NEED_UNWRAP: newMask = EventMask.READ; break; case NOT_HANDSHAKING: newMask = this.appEventMask; break; } if (this.endOfStream && !this.inPlain.hasData()) { newMask = newMask & ~EventMask.READ; } else if (this.status == Status.CLOSING) { newMask = newMask | EventMask.READ; } // Do we have encrypted data ready to be sent? if (this.outEncrypted.hasData()) { newMask = newMask | EventMask.WRITE; } // Update the mask if necessary if (oldMask != newMask) { this.session.setEventMask(newMask); } } finally { this.session.getLock().unlock(); } } private int sendEncryptedData() throws IOException { this.session.getLock().lock(); try { if (this.status == Status.ACTIVE && !this.outEncrypted.hasData()) { // If the buffer isn't acquired or is empty, call write() with an empty buffer. // This will ensure that tests performed by write() still take place without // having to acquire and release an empty buffer (e.g. connection closed, // interrupted thread, etc..) 
return this.session.write(EMPTY_BUFFER); } // Acquire buffer final ByteBuffer outEncryptedBuf = this.outEncrypted.acquire(); // Clear output buffer if the session has been closed // in case there is still `close_notify` data stuck in it if (this.status == Status.CLOSED) { outEncryptedBuf.clear(); } // Perform operation int bytesWritten = 0; if (outEncryptedBuf.position() > 0) { outEncryptedBuf.flip(); try { bytesWritten = this.session.write(outEncryptedBuf); } finally { outEncryptedBuf.compact(); } } // Release if empty if (outEncryptedBuf.position() == 0) { this.outEncrypted.release(); } return bytesWritten; } finally { this.session.getLock().unlock(); } } private int receiveEncryptedData() throws IOException { if (this.endOfStream) { return -1; } // Acquire buffer final ByteBuffer inEncryptedBuf = this.inEncrypted.acquire(); // Perform operation final int bytesRead = this.session.read(inEncryptedBuf); // Release if empty if (inEncryptedBuf.position() == 0) { this.inEncrypted.release(); } if (bytesRead == -1) { this.endOfStream = true; } return bytesRead; } private void decryptData(final IOSession protocolSession) throws IOException { final HandshakeStatus handshakeStatus = sslEngine.getHandshakeStatus(); if ((handshakeStatus == HandshakeStatus.NOT_HANDSHAKING || handshakeStatus == HandshakeStatus.FINISHED) && inEncrypted.hasData()) { final ByteBuffer inEncryptedBuf = inEncrypted.acquire(); inEncryptedBuf.flip(); try { while (inEncryptedBuf.hasRemaining()) { final ByteBuffer inPlainBuf = inPlain.acquire(); try { final SSLEngineResult result = doUnwrap(inEncryptedBuf, inPlainBuf); if (!inEncryptedBuf.hasRemaining() && result.getHandshakeStatus() == HandshakeStatus.NEED_UNWRAP) { throw new SSLException("Unable to complete SSL handshake"); } if (sslEngine.isInboundDone()) { endOfStream = true; } if (inPlainBuf.position() > 0) { inPlainBuf.flip(); try { ensureHandler().inputReady(protocolSession, inPlainBuf.hasRemaining() ? 
inPlainBuf : null); } finally { inPlainBuf.clear(); } } if (result.getStatus() == SSLEngineResult.Status.BUFFER_UNDERFLOW && endOfStream) { throw new SSLException("Unable to decrypt incoming data due to unexpected end of stream"); } if (result.getStatus() != SSLEngineResult.Status.OK || result.getHandshakeStatus() != HandshakeStatus.NOT_HANDSHAKING && result.getHandshakeStatus() != HandshakeStatus.FINISHED) { break; } } finally { inPlain.release(); } } } finally { inEncryptedBuf.compact(); // Release inEncrypted if empty if (inEncryptedBuf.position() == 0) { inEncrypted.release(); } } } if (endOfStream && !inEncrypted.hasData()) { ensureHandler().inputReady(protocolSession, null); } } private void encryptData(final IOSession protocolSession) throws IOException { final boolean appReady; this.session.getLock().lock(); try { appReady = (this.appEventMask & SelectionKey.OP_WRITE) > 0 && this.status.compareTo(Status.CLOSED) < 0 && this.sslEngine.getHandshakeStatus() == HandshakeStatus.NOT_HANDSHAKING; } finally { this.session.getLock().unlock(); } if (appReady) { ensureHandler().outputReady(protocolSession); } } @Override public int write(final ByteBuffer src) throws IOException { Args.notNull(src, "Byte buffer"); this.session.getLock().lock(); try { if (this.status != Status.ACTIVE) { throw new ClosedChannelException(); } if (this.handshakeStateRef.get() == TLSHandShakeState.READY) { return 0; } for (;;) { final ByteBuffer outEncryptedBuf = this.outEncrypted.acquire(); final SSLEngineResult result = doWrap(src, outEncryptedBuf); if (result.getStatus() == SSLEngineResult.Status.BUFFER_OVERFLOW) { // We don't release the buffer here, it will be expanded (if needed) // and returned by the next attempt of SSLManagedBuffer#acquire() call. this.outEncrypted.ensureWriteable(packetBufferSize); } else { return result.bytesConsumed(); } } } finally { this.session.getLock().unlock(); } } @Override public int read(final ByteBuffer dst) { return endOfStream ? 
-1 : 0; } @Override public String getId() { return session.getId(); } @Override public Lock getLock() { return this.session.getLock(); } @Override public void upgrade(final IOEventHandler handler) { this.session.upgrade(handler); } public TlsDetails getTlsDetails() { return tlsDetails; } @Override public boolean isOpen() { return this.status == Status.ACTIVE && this.session.isOpen(); } @Override public void close() { close(CloseMode.GRACEFUL); } @Override public void close(final CloseMode closeMode) { this.session.getLock().lock(); try { appClosed = true; if (closeMode == CloseMode.GRACEFUL) { if (this.status.compareTo(Status.CLOSING) >= 0) { return; } this.status = Status.CLOSING; if (this.session.getSocketTimeout().isDisabled()) { this.session.setSocketTimeout(Timeout.ofMilliseconds(1000)); } try { // Catch all unchecked exceptions in case something goes wrong // in the JSSE provider. For instance // com.android.org.conscrypt.NativeCrypto#SSL_get_shutdown can // throw NPE at this point doHandshake(this); sendEncryptedData(); updateEventMask(); } catch (final CancelledKeyException ex) { this.session.close(CloseMode.GRACEFUL); } catch (final Exception ex) { this.session.close(CloseMode.IMMEDIATE); } } else { if (this.status == Status.CLOSED) { return; } this.inEncrypted.release(); this.outEncrypted.release(); this.inPlain.release(); this.status = Status.CLOSED; this.session.close(closeMode); } } finally { this.session.getLock().unlock(); } } @Override public Status getStatus() { return this.status; } @Override public void enqueue(final Command command, final Command.Priority priority) { this.session.getLock().lock(); try { this.session.enqueue(command, priority); if (isOpen()) { setEvent(SelectionKey.OP_WRITE); } else { command.cancel(); } } finally { this.session.getLock().unlock(); } } @Override public boolean hasCommands() { return this.session.hasCommands(); } @Override public Command poll() { return this.session.poll(); } @Override public ByteChannel channel() 
{ return this.session.channel(); } @Override public SocketAddress getLocalAddress() { return this.session.getLocalAddress(); } @Override public SocketAddress getRemoteAddress() { return this.session.getRemoteAddress(); } @Override public int getEventMask() { this.session.getLock().lock(); try { return this.appEventMask; } finally { this.session.getLock().unlock(); } } @Override public void setEventMask(final int ops) { this.session.getLock().lock(); try { this.appEventMask = ops; updateEventMask(); } finally { this.session.getLock().unlock(); } } @Override public void setEvent(final int op) { this.session.getLock().lock(); try { this.appEventMask = this.appEventMask | op; updateEventMask(); } finally { this.session.getLock().unlock(); } } @Override public void clearEvent(final int op) { this.session.getLock().lock(); try { this.appEventMask = this.appEventMask & ~op; updateEventMask(); } finally { this.session.getLock().unlock(); } } @Override public Timeout getSocketTimeout() { return this.session.getSocketTimeout(); } @Override public void setSocketTimeout(final Timeout timeout) { this.socketTimeout = timeout; if (this.sslEngine.getHandshakeStatus() == HandshakeStatus.FINISHED) { this.session.setSocketTimeout(timeout); } } @Override public void updateReadTime() { this.session.updateReadTime(); } @Override public void updateWriteTime() { this.session.updateWriteTime(); } @Override public long getLastReadTime() { return this.session.getLastReadTime(); } @Override public long getLastWriteTime() { return this.session.getLastWriteTime(); } @Override public long getLastEventTime() { return this.session.getLastEventTime(); } private static void formatOps(final StringBuilder buffer, final int ops) { if ((ops & SelectionKey.OP_READ) > 0) { buffer.append('r'); } if ((ops & SelectionKey.OP_WRITE) > 0) { buffer.append('w'); } } @Override public String toString() { this.session.getLock().lock(); try { final StringBuilder buffer = new StringBuilder(); 
buffer.append(this.session); buffer.append("["); buffer.append(this.status); buffer.append("]["); formatOps(buffer, this.appEventMask); buffer.append("]["); buffer.append(this.sslEngine.getHandshakeStatus()); if (this.sslEngine.isInboundDone()) { buffer.append("][inbound done]["); } if (this.sslEngine.isOutboundDone()) { buffer.append("][outbound done]["); } if (this.endOfStream) { buffer.append("][EOF]["); } buffer.append("]["); buffer.append(!this.inEncrypted.hasData() ? 0 : inEncrypted.acquire().position()); buffer.append("]["); buffer.append(!this.inPlain.hasData() ? 0 : inPlain.acquire().position()); buffer.append("]["); buffer.append(!this.outEncrypted.hasData() ? 0 : outEncrypted.acquire().position()); buffer.append("]"); return buffer.toString(); } finally { this.session.getLock().unlock(); } } }
apache/nifi
35,556
nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.processors.standard; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.components.state.StateMap; import org.apache.nifi.database.dialect.service.api.ColumnDefinition; import org.apache.nifi.database.dialect.service.api.StandardColumnDefinition; import org.apache.nifi.database.dialect.service.api.DatabaseDialectService; import org.apache.nifi.database.dialect.service.api.QueryStatementRequest; import org.apache.nifi.database.dialect.service.api.StandardQueryStatementRequest; import org.apache.nifi.database.dialect.service.api.StatementResponse; import org.apache.nifi.database.dialect.service.api.StatementType; import org.apache.nifi.database.dialect.service.api.TableDefinition; import org.apache.nifi.dbcp.DBCPService; import org.apache.nifi.expression.AttributeExpression; import 
org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.ProcessSessionFactory; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.sql.SqlWriter; import org.apache.nifi.util.StopWatch; import org.apache.nifi.util.db.JdbcCommon; import java.io.IOException; import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Statement; import java.text.ParseException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.IntStream; public abstract class AbstractQueryDatabaseTable extends AbstractDatabaseFetchProcessor { public static final String RESULT_TABLENAME = "tablename"; public static final String RESULT_ROW_COUNT = "querydbtable.row.count"; private static final AllowableValue TRANSACTION_READ_COMMITTED = new AllowableValue( String.valueOf(Connection.TRANSACTION_READ_COMMITTED), "TRANSACTION_READ_COMMITTED" ); private static final AllowableValue TRANSACTION_READ_UNCOMMITTED = new AllowableValue( String.valueOf(Connection.TRANSACTION_READ_UNCOMMITTED), "TRANSACTION_READ_UNCOMMITTED" ); private static final AllowableValue TRANSACTION_REPEATABLE_READ = new AllowableValue( String.valueOf(Connection.TRANSACTION_REPEATABLE_READ), "TRANSACTION_REPEATABLE_READ" ); private static final AllowableValue 
TRANSACTION_NONE = new AllowableValue( String.valueOf(Connection.TRANSACTION_NONE), "TRANSACTION_NONE" ); private static final AllowableValue TRANSACTION_SERIALIZABLE = new AllowableValue( String.valueOf(Connection.TRANSACTION_SERIALIZABLE), "TRANSACTION_SERIALIZABLE" ); private static final String FETCH_SIZE_NAME = "Fetch Size"; private static final String AUTO_COMMIT_NAME = "Set Auto Commit"; public static final PropertyDescriptor FETCH_SIZE = new PropertyDescriptor.Builder() .name(FETCH_SIZE_NAME) .description("The number of result rows to be fetched from the result set at a time. This is a hint to the database driver and may not be " + "honored and/or exact. If the value specified is zero, then the hint is ignored. " + "If using PostgreSQL, then '" + AUTO_COMMIT_NAME + "' must be equal to 'false' to cause '" + FETCH_SIZE_NAME + "' to take effect.") .defaultValue("0") .required(true) .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .build(); public static final PropertyDescriptor AUTO_COMMIT = new PropertyDescriptor.Builder() .name(AUTO_COMMIT_NAME) .description("Allows enabling or disabling the auto commit functionality of the DB connection. Default value is 'No value set'. " + "'No value set' will leave the db connection's auto commit mode unchanged. " + "For some JDBC drivers such as PostgreSQL driver, it is required to disable the auto commit functionality " + "to get the '" + FETCH_SIZE_NAME + "' setting to take effect. " + "When auto commit is enabled, PostgreSQL driver ignores '" + FETCH_SIZE_NAME + "' setting and loads all rows of the result set to memory at once. " + "This could lead for a large amount of memory usage when executing queries which fetch large data sets. 
" + "More Details of this behaviour in PostgreSQL driver can be found in https://jdbc.postgresql.org//documentation/head/query.html.") .allowableValues("true", "false") .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .required(false) .build(); public static final PropertyDescriptor MAX_ROWS_PER_FLOW_FILE = new PropertyDescriptor.Builder() .name("qdbt-max-rows") .displayName("Max Rows Per Flow File") .description("The maximum number of result rows that will be included in a single FlowFile. This will allow you to break up very large " + "result sets into multiple FlowFiles. If the value specified is zero, then all rows are returned in a single FlowFile.") .defaultValue("0") .required(true) .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .build(); public static final PropertyDescriptor OUTPUT_BATCH_SIZE = new PropertyDescriptor.Builder() .name("qdbt-output-batch-size") .displayName("Output Batch Size") .description("The number of output FlowFiles to queue before committing the process session. When set to zero, the session will be committed when all result set rows " + "have been processed and the output FlowFiles are ready for transfer to the downstream relationship. For large result sets, this can cause a large burst of FlowFiles " + "to be transferred at the end of processor execution. If this property is set, then when the specified number of FlowFiles are ready for transfer, then the session will " + "be committed, thus releasing the FlowFiles to the downstream relationship. 
NOTE: The maxvalue.* and fragment.count attributes will not be set on FlowFiles when this " + "property is set.") .defaultValue("0") .required(true) .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .build(); public static final PropertyDescriptor MAX_FRAGMENTS = new PropertyDescriptor.Builder() .name("qdbt-max-frags") .displayName("Maximum Number of Fragments") .description("The maximum number of fragments. If the value specified is zero, then all fragments are returned. " + "This prevents OutOfMemoryError when this processor ingests huge table. NOTE: Setting this property can result in data loss, as the incoming results are " + "not ordered, and fragments may end at arbitrary boundaries where rows are not included in the result set.") .defaultValue("0") .required(true) .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT) .build(); public static final PropertyDescriptor TRANS_ISOLATION_LEVEL = new PropertyDescriptor.Builder() .name("transaction-isolation-level") .displayName("Transaction Isolation Level") .description("This setting will set the transaction isolation level for the database connection for drivers that support this setting") .required(false) .allowableValues(TRANSACTION_NONE, TRANSACTION_READ_COMMITTED, TRANSACTION_READ_UNCOMMITTED, TRANSACTION_REPEATABLE_READ, TRANSACTION_SERIALIZABLE) .build(); public static final AllowableValue INITIAL_LOAD_STRATEGY_ALL_ROWS = new AllowableValue("Start at Beginning", "Start at Beginning", "Loads all existing rows from the database table."); public static final AllowableValue INITIAL_LOAD_STRATEGY_NEW_ROWS = new AllowableValue("Start at Current Maximum Values", "Start at Current Maximum Values", "Loads only the newly " + "inserted or updated rows based on the maximum value(s) of the column(s) configured in the '" + MAX_VALUE_COLUMN_NAMES.getDisplayName() + "' 
property.");

    // Strategy for the very first run (or after processor state is cleared):
    // either fetch every existing row, or skip existing rows and only pick up new ones.
    // Ignored when any "initial.maxvalue.*" dynamic property is configured (see description).
    public static final PropertyDescriptor INITIAL_LOAD_STRATEGY = new PropertyDescriptor.Builder()
            .name("initial-load-strategy")
            .displayName("Initial Load Strategy")
            .description("How to handle existing rows in the database table when the processor is started for the first time (or its state has been cleared). The property will be ignored, "
                    + "if any '" + INITIAL_MAX_VALUE_PROP_START + "*' dynamic property has also been configured.")
            .required(true)
            .allowableValues(INITIAL_LOAD_STRATEGY_ALL_ROWS, INITIAL_LOAD_STRATEGY_NEW_ROWS)
            .defaultValue(INITIAL_LOAD_STRATEGY_ALL_ROWS.getValue())
            .build();

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propDescriptors;
    }

    /**
     * Builds a descriptor for user-defined dynamic properties.
     * Dynamic property values support environment-scoped expression language and are
     * validated as attribute-expression strings.
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        return new PropertyDescriptor.Builder()
                .name(propertyDescriptorName)
                .required(false)
                .addValidator(StandardValidators.createAttributeExpressionLanguageValidator(AttributeExpression.ResultType.STRING, true))
                .addValidator(StandardValidators.ATTRIBUTE_KEY_PROPERTY_NAME_VALIDATOR)
                .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
                .dynamic(true)
                .build();
    }

    /**
     * Cross-property validation: the "new rows only" initial load strategy requires
     * the Maximum-value Columns property to be set, since without max-value columns
     * there is no way to distinguish new rows from existing ones.
     */
    @Override
    protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
        final List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext));

        // NOTE: despite the name, this boolean only records whether the property is set.
        final boolean maxValueColumnNames = validationContext.getProperty(MAX_VALUE_COLUMN_NAMES).isSet();
        final String initialLoadStrategy = validationContext.getProperty(INITIAL_LOAD_STRATEGY).getValue();

        if (!maxValueColumnNames && initialLoadStrategy.equals(INITIAL_LOAD_STRATEGY_NEW_ROWS.getValue())) {
            results.add(new ValidationResult.Builder().valid(false)
                    .subject(INITIAL_LOAD_STRATEGY.getDisplayName())
                    .input(INITIAL_LOAD_STRATEGY_NEW_ROWS.getDisplayName())
                    .explanation(String.format("'%s' strategy can only be used when '%s' property is also configured", INITIAL_LOAD_STRATEGY_NEW_ROWS.getDisplayName(), MAX_VALUE_COLUMN_NAMES.getDisplayName()))
                    .build());
        }

        return results;
    }

    /**
     * Captures the user-configured initial maximum values at schedule time.
     * The heavier column/table setup is deferred to the first onTrigger call
     * (guarded by setupComplete).
     */
    @Override
    @OnScheduled
    public void setup(final ProcessContext context) {
        maxValueProperties = getDefaultMaxValueProperties(context, null);
    }

    @OnStopped
    public void stop() {
        // Reset the column type map in case properties change
        setupComplete.set(false);
    }

    /**
     * Main processing loop. Per invocation this method:
     * <ol>
     *   <li>Lazily runs column/table setup on first trigger.</li>
     *   <li>Reads all configured properties and obtains cluster-scoped state (the
     *       observed per-column maximum values); yields if state cannot be read.</li>
     *   <li>Seeds the state map with user-supplied initial max values (falling back to
     *       legacy column-name-only state keys when present).</li>
     *   <li>For the "new rows" initial-load strategy with empty state, issues a
     *       SELECT MAX(col)... query to pre-seed the max values so existing rows are skipped.</li>
     *   <li>Builds the main SELECT via {@link #getQuery}, executes it, and writes results into
     *       one or more FlowFiles (fragmented per Max Rows Per Flow File / Max Fragments),
     *       optionally committing in batches of Output Batch Size.</li>
     *   <li>Applies max-value updates collected from the rows, decorates FlowFiles with
     *       maxvalue.* and fragment attributes (only when not batching), restores the
     *       connection's original auto-commit setting, and persists the new state.</li>
     * </ol>
     * On query failure all un-transferred FlowFiles are removed and the processor yields;
     * state is still written and the session committed asynchronously in the finally block.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
        // Fetch the column/table info once
        if (!setupComplete.get()) {
            super.setup(context);
        }
        ProcessSession session = sessionFactory.createSession();
        final List<FlowFile> resultSetFlowFiles = new ArrayList<>();

        final ComponentLog logger = getLogger();

        final DBCPService dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
        final DatabaseDialectService databaseDialectService = getDatabaseDialectService(context);
        final String databaseType = context.getProperty(DB_TYPE).getValue();
        final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions().getValue();
        final String columnNames = context.getProperty(COLUMN_NAMES).evaluateAttributeExpressions().getValue();
        final String sqlQuery = context.getProperty(SQL_QUERY).evaluateAttributeExpressions().getValue();
        final String maxValueColumnNames = context.getProperty(MAX_VALUE_COLUMN_NAMES).evaluateAttributeExpressions().getValue();
        final String initialLoadStrategy = context.getProperty(INITIAL_LOAD_STRATEGY).getValue();
        final String customWhereClause = context.getProperty(WHERE_CLAUSE).evaluateAttributeExpressions().getValue();
        final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.SECONDS).intValue();
        final Integer fetchSize = context.getProperty(FETCH_SIZE).evaluateAttributeExpressions().asInteger();
        final Integer maxRowsPerFlowFile = context.getProperty(MAX_ROWS_PER_FLOW_FILE).evaluateAttributeExpressions().asInteger();
        final Integer outputBatchSizeField = context.getProperty(OUTPUT_BATCH_SIZE).evaluateAttributeExpressions().asInteger();
        final int outputBatchSize = outputBatchSizeField == null ? 0 : outputBatchSizeField;
        final Integer maxFragments = context.getProperty(MAX_FRAGMENTS).isSet()
                ? context.getProperty(MAX_FRAGMENTS).evaluateAttributeExpressions().asInteger()
                : 0;
        final Integer transIsolationLevel = context.getProperty(TRANS_ISOLATION_LEVEL).isSet()
                ? context.getProperty(TRANS_ISOLATION_LEVEL).asInteger()
                : null;

        SqlWriter sqlWriter = configureSqlWriter(session, context);

        // Cluster-scoped state holds the last observed maximum value per tracked column.
        final StateMap stateMap;
        try {
            stateMap = session.getState(Scope.CLUSTER);
        } catch (final IOException ioe) {
            getLogger().error("Failed to retrieve observed maximum values from the State Manager. Will not perform "
                    + "query until this is accomplished.", ioe);
            context.yield();
            return;
        }

        // Make a mutable copy of the current state property map. This will be updated by the result row callback, and eventually
        // set as the current state map (after the session has been committed)
        final Map<String, String> statePropertyMap = new HashMap<>(stateMap.toMap());

        //If an initial max value for column(s) has been specified using properties, and this column is not in the state manager, sync them to the state property map
        for (final Map.Entry<String, String> maxProp : maxValueProperties.entrySet()) {
            String maxPropKey = maxProp.getKey().toLowerCase();
            String fullyQualifiedMaxPropKey = getStateKey(tableName, maxPropKey);
            if (!statePropertyMap.containsKey(fullyQualifiedMaxPropKey)) {
                String newMaxPropValue;
                // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme)
                // the value has been stored under a key that is only the column name. Fall back to check the column name,
                // but store the new initial max value under the fully-qualified key.
                if (statePropertyMap.containsKey(maxPropKey)) {
                    newMaxPropValue = statePropertyMap.get(maxPropKey);
                } else {
                    newMaxPropValue = maxProp.getValue();
                }
                statePropertyMap.put(fullyQualifiedMaxPropKey, newMaxPropValue);
            }
        }

        List<String> maxValueColumnNameList = StringUtils.isEmpty(maxValueColumnNames)
                ? null
                : Arrays.asList(maxValueColumnNames.split("\\s*,\\s*"));

        // "New rows" strategy with no prior state: pre-seed the max values with a
        // SELECT MAX(col) AS col ... query so that all currently-existing rows are skipped.
        if (maxValueColumnNameList != null && statePropertyMap.isEmpty() && initialLoadStrategy.equals(INITIAL_LOAD_STRATEGY_NEW_ROWS.getValue())) {
            final List<ColumnDefinition> maxValueColumnDefinitions = maxValueColumnNameList.stream()
                    .map(columnName -> String.format("MAX(%s) %s", columnName, columnName))
                    .map(StandardColumnDefinition::new)
                    .map(ColumnDefinition.class::cast)
                    .toList();
            final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, maxValueColumnDefinitions);
            final QueryStatementRequest statementRequest = new StandardQueryStatementRequest(StatementType.SELECT, tableDefinition);
            final StatementResponse maxValueStatementResponse = databaseDialectService.getStatement(statementRequest);
            final String selectMaxQuery = maxValueStatementResponse.sql();

            try (final Connection con = dbcpService.getConnection(Collections.emptyMap());
                 final Statement st = con.createStatement()) {
                if (transIsolationLevel != null) {
                    con.setTransactionIsolation(transIsolationLevel);
                }
                st.setQueryTimeout(queryTimeout); // timeout in seconds
                try (final ResultSet resultSet = st.executeQuery(selectMaxQuery)) {
                    if (resultSet.next()) {
                        // Fold the MAX() row into statePropertyMap via the collector.
                        final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap);
                        maxValCollector.processRow(resultSet);
                        maxValCollector.applyStateChanges();
                    }
                }
            } catch (final Exception e) {
                // Best-effort: on failure we log, yield, and fall through to the main query.
                logger.error("Unable to execute SQL select query {} due to {}", selectMaxQuery, e);
                context.yield();
            }
        }

        final List<String> parsedColumnNames;
        if (columnNames == null) {
            parsedColumnNames = List.of();
        } else {
            // NOTE(review): splits on the literal ", " whereas MAX_VALUE_COLUMN_NAMES above uses
            // "\\s*,\\s*"; a comma without a trailing space would not split here — confirm intended.
            parsedColumnNames = Arrays.asList(columnNames.split(", "));
        }

        final String selectQuery = getQuery(databaseDialectService, databaseType, tableName, sqlQuery, parsedColumnNames, maxValueColumnNameList, customWhereClause, statePropertyMap);
        final StopWatch stopWatch = new StopWatch(true);
        final String fragmentIdentifier = UUID.randomUUID().toString();

        try (final Connection con = dbcpService.getConnection(Collections.emptyMap());
             final Statement st = con.createStatement()) {

            if (fetchSize != null && fetchSize > 0) {
                try {
                    st.setFetchSize(fetchSize);
                } catch (SQLException se) {
                    // Not all drivers support this, just log the error (at debug level) and move on
                    logger.debug("Cannot set fetch size to {} due to {}", fetchSize, se.getLocalizedMessage(), se);
                }
            }

            if (transIsolationLevel != null) {
                con.setTransactionIsolation(transIsolationLevel);
            }

            // Used only as the provenance transit URI; falls back to a placeholder.
            String jdbcURL = "DBCPService";
            try {
                DatabaseMetaData databaseMetaData = con.getMetaData();
                if (databaseMetaData != null) {
                    jdbcURL = databaseMetaData.getURL();
                }
            } catch (SQLException ignored) {
                // Ignore and use default JDBC URL. This shouldn't happen unless the driver doesn't implement getMetaData() properly
            }

            st.setQueryTimeout(queryTimeout); // timeout in seconds
            if (logger.isDebugEnabled()) {
                logger.debug("Executing query {}", selectQuery);
            }

            // Remember the driver's auto-commit so it can be restored in the finally block.
            final boolean originalAutoCommit = con.getAutoCommit();
            final Boolean setAutoCommitValue = context.getProperty(AUTO_COMMIT).evaluateAttributeExpressions().asBoolean();
            // If user sets AUTO_COMMIT property to non-null (i.e. true or false), then the property value overrides the dbAdapter's value
            if (setAutoCommitValue != null && originalAutoCommit != setAutoCommitValue) {
                try {
                    con.setAutoCommit(setAutoCommitValue);
                    logger.debug("Driver connection changed to setAutoCommit({})", setAutoCommitValue);
                } catch (Exception ex) {
                    logger.debug("Failed to setAutoCommit({}) due to {}: {}", setAutoCommitValue, ex.getClass().getName(), ex.getMessage());
                }
            }

            try (final ResultSet resultSet = st.executeQuery(selectQuery)) {
                int fragmentIndex = 0;
                // Max values will be updated in the state property map by the callback
                final MaxValueResultSetRowCollector maxValCollector = new MaxValueResultSetRowCollector(tableName, statePropertyMap);

                // Each iteration drains up to maxRowsPerFlowFile rows into one FlowFile.
                while (true) {
                    final AtomicLong nrOfRows = new AtomicLong(0L);

                    FlowFile fileToProcess = session.create();
                    try {
                        fileToProcess = session.write(fileToProcess, out -> {
                            try {
                                nrOfRows.set(sqlWriter.writeResultSet(resultSet, out, getLogger(), maxValCollector));
                            } catch (Exception e) {
                                throw new ProcessException("Error during database query or conversion of records.", e);
                            }
                        });
                    } catch (ProcessException e) {
                        // Add flowfile to results before rethrowing so it will be removed from session in outer catch
                        resultSetFlowFiles.add(fileToProcess);
                        throw e;
                    }

                    if (nrOfRows.get() > 0) {
                        // set attributes
                        final Map<String, String> attributesToAdd = new HashMap<>();
                        attributesToAdd.put(RESULT_ROW_COUNT, String.valueOf(nrOfRows.get()));
                        attributesToAdd.put(RESULT_TABLENAME, tableName);

                        // Fragment attributes only make sense when output is being split.
                        if (maxRowsPerFlowFile > 0) {
                            attributesToAdd.put(FRAGMENT_ID, fragmentIdentifier);
                            attributesToAdd.put(FRAGMENT_INDEX, String.valueOf(fragmentIndex));
                        }

                        attributesToAdd.putAll(sqlWriter.getAttributesToAdd());
                        fileToProcess = session.putAllAttributes(fileToProcess, attributesToAdd);
                        sqlWriter.updateCounters(session);

                        logger.debug("{} contains {} records; transferring to 'success'", fileToProcess, nrOfRows.get());

                        session.getProvenanceReporter().receive(fileToProcess, jdbcURL, stopWatch.getElapsed(TimeUnit.MILLISECONDS));
                        resultSetFlowFiles.add(fileToProcess);

                        // If we've reached the batch size, send out the flow files
                        if (outputBatchSize > 0 && resultSetFlowFiles.size() >= outputBatchSize) {
                            session.transfer(resultSetFlowFiles, REL_SUCCESS);
                            session.commitAsync();
                            resultSetFlowFiles.clear();
                        }
                    } else {
                        // If there were no rows returned, don't send the flowfile
                        session.remove(fileToProcess);
                        // If no rows and this was first FlowFile, yield
                        if (fragmentIndex == 0) {
                            context.yield();
                        }
                        break;
                    }

                    fragmentIndex++;
                    if (maxFragments > 0 && fragmentIndex >= maxFragments) {
                        break;
                    }

                    // If we aren't splitting up the data into flow files or fragments, then the result set has been entirely fetched so don't loop back around
                    if (maxFragments == 0 && maxRowsPerFlowFile == 0) {
                        break;
                    }

                    // If we are splitting up the data into flow files, don't loop back around if we've gotten all results
                    if (maxRowsPerFlowFile > 0 && nrOfRows.get() < maxRowsPerFlowFile) {
                        break;
                    }
                }

                // Apply state changes from the Max Value tracker
                maxValCollector.applyStateChanges();

                // Even though the maximum value and total count are known at this point, to maintain consistent behavior if Output Batch Size is set, do not store the attributes
                if (outputBatchSize == 0) {
                    for (int i = 0; i < resultSetFlowFiles.size(); i++) {
                        final Map<String, String> newAttributesMap = new HashMap<>();

                        // Add maximum values as attributes
                        for (Map.Entry<String, String> entry : statePropertyMap.entrySet()) {
                            // Get just the column name from the key
                            String key = entry.getKey();
                            String colName = key.substring(key.lastIndexOf(NAMESPACE_DELIMITER) + NAMESPACE_DELIMITER.length());
                            newAttributesMap.put("maxvalue." + colName, entry.getValue());
                        }

                        // Set count for all FlowFiles
                        if (maxRowsPerFlowFile > 0) {
                            newAttributesMap.put(FRAGMENT_COUNT, Integer.toString(fragmentIndex));
                        }

                        resultSetFlowFiles.set(i, session.putAllAttributes(resultSetFlowFiles.get(i), newAttributesMap));
                    }
                }
            } catch (final SQLException e) {
                // NOTE(review): no-op rethrow — presumably kept so the finally below runs before
                // the outer catch handles the SQLException; confirm it can't simply be removed.
                throw e;
            } finally {
                // Always restore the connection's original auto-commit before returning it to the pool.
                if (con.getAutoCommit() != originalAutoCommit) {
                    try {
                        con.setAutoCommit(originalAutoCommit);
                        logger.debug("Driver connection reset to original setAutoCommit({})", originalAutoCommit);
                    } catch (Exception ex) {
                        logger.debug("Failed to setAutoCommit({}) due to {}: {}", originalAutoCommit, ex.getClass().getName(), ex.getMessage());
                    }
                }
            }

            session.transfer(resultSetFlowFiles, REL_SUCCESS);
        } catch (final ProcessException | SQLException e) {
            logger.error("Unable to execute SQL select query {} due to {}", selectQuery, e);
            if (!resultSetFlowFiles.isEmpty()) {
                session.remove(resultSetFlowFiles);
            }
            context.yield();
        } finally {
            try {
                // Update the state
                session.setState(statePropertyMap, Scope.CLUSTER);
            } catch (IOException ioe) {
                getLogger().error("{} failed to update State Manager, maximum observed values will not be recorded", this, ioe);
            }

            session.commitAsync();
        }
    }

    /**
     * Builds the SELECT statement for this trigger via the dialect service, then appends a
     * WHERE clause derived from the tracked maximum values and the optional custom clause.
     * The first max-value column uses a strict {@code >} comparison and subsequent columns
     * use {@code >=}, so composite max-value keys don't drop rows sharing the leading value.
     *
     * @param databaseDialectService dialect service that renders the base SELECT
     * @param databaseType database type identifier, passed through to getLiteralByType
     * @param tableName target table; must be non-empty
     * @param sqlQuery optional custom query used as a derived table (nullable)
     * @param columnNames columns to select; empty list means all
     * @param maxValColumnNames tracked max-value columns, or null if none
     * @param customWhereClause optional extra WHERE condition, ANDed in parentheses (nullable)
     * @param stateMap current column-name -> last-observed-max map (nullable/empty on first run)
     * @return the complete SQL SELECT statement
     * @throws IllegalArgumentException if tableName is empty, or a tracked column has no
     *         entry in columnTypeMap (populated at schedule time)
     */
    private String getQuery(
            final DatabaseDialectService databaseDialectService,
            final String databaseType,
            final String tableName,
            final String sqlQuery,
            final List<String> columnNames,
            final List<String> maxValColumnNames,
            final String customWhereClause,
            final Map<String, String> stateMap
    ) {
        if (StringUtils.isEmpty(tableName)) {
            throw new IllegalArgumentException("Table name must be specified");
        }

        final Optional<String> derivedTableQuery = Optional.ofNullable(sqlQuery);

        final List<ColumnDefinition> columnDefinitions = columnNames.stream()
                .map(StandardColumnDefinition::new)
                .map(ColumnDefinition.class::cast)
                .toList();
        final TableDefinition tableDefinition = new TableDefinition(Optional.empty(), Optional.empty(), tableName, columnDefinitions);
        final QueryStatementRequest statementRequest = new StandardQueryStatementRequest(
                StatementType.SELECT,
                tableDefinition,
                derivedTableQuery,
                Optional.empty(),
                Optional.empty(),
                Optional.empty()
        );
        final StatementResponse statementResponse = databaseDialectService.getStatement(statementRequest);

        final StringBuilder query = new StringBuilder();
        query.append(statementResponse.sql());

        List<String> whereClauses = new ArrayList<>();
        // Check state map for last max values
        if (stateMap != null && !stateMap.isEmpty() && maxValColumnNames != null) {
            IntStream.range(0, maxValColumnNames.size()).forEach((index) -> {
                String colName = maxValColumnNames.get(index);
                String maxValueKey = getStateKey(tableName, colName);
                String maxValue = stateMap.get(maxValueKey);
                if (StringUtils.isEmpty(maxValue)) {
                    // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme)
                    // the value has been stored under a key that is only the column name. Fall back to check the column name; either way, when a new
                    // maximum value is observed, it will be stored under the fully-qualified key from then on.
                    maxValue = stateMap.get(colName.toLowerCase());
                }
                if (!StringUtils.isEmpty(maxValue)) {
                    Integer type = columnTypeMap.get(maxValueKey);
                    if (type == null) {
                        // This shouldn't happen as we are populating columnTypeMap when the processor is scheduled.
                        throw new IllegalArgumentException("No column type found for: " + colName);
                    }
                    // Add a condition for the WHERE clause
                    whereClauses.add(colName + (index == 0 ? " > " : " >= ") + getLiteralByType(type, maxValue, databaseType));
                }
            });
        }

        if (customWhereClause != null) {
            whereClauses.add("(" + customWhereClause + ")");
        }

        if (!whereClauses.isEmpty()) {
            query.append(" WHERE ");
            query.append(StringUtils.join(whereClauses, " AND "));
        }

        return query.toString();
    }

    /**
     * Row callback that tracks the running maximum value per configured max-value column.
     * Works on a private copy (newColMap) of the supplied state map; callers must invoke
     * {@link #applyStateChanges()} to fold the new maxima back into the original map.
     */
    public class MaxValueResultSetRowCollector implements JdbcCommon.ResultSetRowCallback {
        final Map<String, String> newColMap;       // working copy, updated per row
        final Map<String, String> originalState;   // caller-owned map, updated on applyStateChanges()
        String tableName;

        public MaxValueResultSetRowCollector(String tableName, Map<String, String> stateMap) {
            this.originalState = stateMap;
            this.newColMap = new HashMap<>();
            this.newColMap.putAll(stateMap);
            this.tableName = tableName;
        }

        /**
         * Examines one result row and raises the stored maximum for every tracked column.
         * Columns absent from columnTypeMap, or with NULL values, are skipped.
         *
         * @throws IOException wrapping any ParseException/SQLException from value extraction
         */
        @Override
        public void processRow(ResultSet resultSet) throws IOException {
            if (resultSet == null) {
                return;
            }
            try {
                // Iterate over the row, check-and-set max values
                final ResultSetMetaData meta = resultSet.getMetaData();
                final int nrOfColumns = meta.getColumnCount();
                if (nrOfColumns > 0) {
                    for (int i = 1; i <= nrOfColumns; i++) {
                        String colName = meta.getColumnName(i).toLowerCase();
                        String fullyQualifiedMaxValueKey = getStateKey(tableName, colName);
                        Integer type = columnTypeMap.get(fullyQualifiedMaxValueKey);
                        // Skip any columns we're not keeping track of or whose value is null
                        if (type == null || resultSet.getObject(i) == null) {
                            continue;
                        }
                        String maxValueString = newColMap.get(fullyQualifiedMaxValueKey);
                        // If we can't find the value at the fully-qualified key name, it is possible (under a previous scheme)
                        // the value has been stored under a key that is only the column name. Fall back to check the column name; either way, when a new
                        // maximum value is observed, it will be stored under the fully-qualified key from then on.
                        if (StringUtils.isEmpty(maxValueString)) {
                            maxValueString = newColMap.get(colName);
                        }
                        String newMaxValueString = getMaxValueFromRow(resultSet, i, type, maxValueString);
                        if (newMaxValueString != null) {
                            newColMap.put(fullyQualifiedMaxValueKey, newMaxValueString);
                        }
                    }
                }
            } catch (ParseException | SQLException e) {
                throw new IOException(e);
            }
        }

        /** Folds all newly observed maxima into the caller-supplied state map. */
        @Override
        public void applyStateChanges() {
            this.originalState.putAll(this.newColMap);
        }
    }

    /** Subclasses supply the writer that serializes the ResultSet into FlowFile content. */
    protected abstract SqlWriter configureSqlWriter(ProcessSession session, ProcessContext context);
}
apache/juneau
32,526
juneau-core/juneau-marshall/src/main/java/org/apache/juneau/uon/UonParserSession.java
// *************************************************************************************************************************** // * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file * // * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file * // * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance * // * with the License. You may obtain a copy of the License at * // * * // * http://www.apache.org/licenses/LICENSE-2.0 * // * * // * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an * // * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * // * specific language governing permissions and limitations under the License. * // *************************************************************************************************************************** package org.apache.juneau.uon; import static org.apache.juneau.collections.JsonMap.*; import static org.apache.juneau.common.utils.StringUtils.*; import static org.apache.juneau.common.utils.Utils.*; import java.io.*; import java.lang.reflect.*; import java.nio.charset.*; import java.util.*; import java.util.function.*; import org.apache.juneau.*; import org.apache.juneau.collections.*; import org.apache.juneau.common.utils.*; import org.apache.juneau.httppart.*; import org.apache.juneau.internal.*; import org.apache.juneau.parser.*; import org.apache.juneau.swap.*; /** * Session object that lives for the duration of a single use of {@link UonParser}. * * <h5 class='section'>Notes:</h5><ul> * <li class='warn'>This class is not thread safe and is typically discarded after one use. 
* </ul> * * <h5 class='section'>See Also:</h5><ul> * <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/UonBasics">UON Basics</a> * </ul> */ @SuppressWarnings({ "unchecked", "rawtypes" }) public class UonParserSession extends ReaderParserSession implements HttpPartParserSession { //------------------------------------------------------------------------------------------------------------------- // Static //------------------------------------------------------------------------------------------------------------------- // Characters that need to be preceded with an escape character. private static final AsciiSet escapedChars = AsciiSet.of("~'\u0001\u0002"); private static final char AMP='\u0001', EQ='\u0002'; // Flags set in reader to denote & and = characters. /** * Creates a new builder for this object. * * @param ctx The context creating this session. * @return A new builder. */ public static Builder create(UonParser ctx) { return new Builder(ctx); } //------------------------------------------------------------------------------------------------------------------- // Builder //------------------------------------------------------------------------------------------------------------------- /** * Builder class. */ @FluentSetters public static class Builder extends ReaderParserSession.Builder { UonParser ctx; boolean decoding; /** * Constructor * * @param ctx The context creating this session. */ protected Builder(UonParser ctx) { super(ctx); this.ctx = ctx; decoding = ctx.decoding; } @Override public UonParserSession build() { return new UonParserSession(this); } /** * Overrides the decoding flag on the context for this session. * * @param value The new value for this setting. * @return This object. 
*/ @FluentSetter public Builder decoding(boolean value) { decoding = value; return this; } // <FluentSetters> @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public <T> Builder apply(Class<T> type, Consumer<T> apply) { super.apply(type, apply); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder debug(Boolean value) { super.debug(value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder properties(Map<String,Object> value) { super.properties(value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder property(String key, Object value) { super.property(key, value); return this; } @Override /* GENERATED - org.apache.juneau.ContextSession.Builder */ public Builder unmodifiable() { super.unmodifiable(); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder locale(Locale value) { super.locale(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder localeDefault(Locale value) { super.localeDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder mediaType(MediaType value) { super.mediaType(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder mediaTypeDefault(MediaType value) { super.mediaTypeDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder timeZone(TimeZone value) { super.timeZone(value); return this; } @Override /* GENERATED - org.apache.juneau.BeanSession.Builder */ public Builder timeZoneDefault(TimeZone value) { super.timeZoneDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */ public Builder javaMethod(Method value) { super.javaMethod(value); return this; } @Override /* GENERATED - 
org.apache.juneau.parser.ParserSession.Builder */ public Builder outer(Object value) { super.outer(value); return this; } @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */ public Builder schema(HttpPartSchema value) { super.schema(value); return this; } @Override /* GENERATED - org.apache.juneau.parser.ParserSession.Builder */ public Builder schemaDefault(HttpPartSchema value) { super.schemaDefault(value); return this; } @Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */ public Builder fileCharset(Charset value) { super.fileCharset(value); return this; } @Override /* GENERATED - org.apache.juneau.parser.ReaderParserSession.Builder */ public Builder streamCharset(Charset value) { super.streamCharset(value); return this; } // </FluentSetters> } //------------------------------------------------------------------------------------------------------------------- // Instance //------------------------------------------------------------------------------------------------------------------- private final UonParser ctx; private final boolean decoding; /** * Constructor. * * @param builder The builder for this object. 
*/ protected UonParserSession(Builder builder) { super(builder); ctx = builder.ctx; decoding = builder.decoding; } @Override /* ParserSession */ protected <T> T doParse(ParserPipe pipe, ClassMeta<T> type) throws IOException, ParseException, ExecutableException { try (UonReader r = getUonReader(pipe, decoding)) { T o = parseAnything(type, r, getOuter(), true, null); validateEnd(r); return o; } } @Override /* ReaderParserSession */ protected <K,V> Map<K,V> doParseIntoMap(ParserPipe pipe, Map<K,V> m, Type keyType, Type valueType) throws Exception { try (UonReader r = getUonReader(pipe, decoding)) { m = parseIntoMap(r, m, (ClassMeta<K>)getClassMeta(keyType), (ClassMeta<V>)getClassMeta(valueType), null); validateEnd(r); return m; } } @Override /* ReaderParserSession */ protected <E> Collection<E> doParseIntoCollection(ParserPipe pipe, Collection<E> c, Type elementType) throws Exception { try (UonReader r = getUonReader(pipe, decoding)) { c = parseIntoCollection(r, c, (ClassMeta<E>)getClassMeta(elementType), false, null); validateEnd(r); return c; } } @Override /* HttpPartParser */ public <T> T parse(HttpPartType partType, HttpPartSchema schema, String in, ClassMeta<T> toType) throws ParseException, SchemaValidationException { if (in == null) return null; if (toType.isString() && isNotEmpty(in)) { // Shortcut - If we're returning a string and the value doesn't start with "'" or is "null", then // just return the string since it's a plain value. // This allows us to bypass the creation of a UonParserSession object. char x = firstNonWhitespaceChar(in); if (x != '\'' && x != 'n' && in.indexOf('~') == -1) return (T)in; if (x == 'n' && "null".equals(in)) return null; } try (ParserPipe pipe = createPipe(in)) { try (UonReader r = getUonReader(pipe, false)) { return parseAnything(toType, r, null, true, null); } } catch (ParseException e) { throw e; } catch (Exception e) { throw new ParseException(e); } } /** * Workhorse method. 
* * @param <T> The class type being parsed, or <jk>null</jk> if unknown. * @param eType The class type being parsed, or <jk>null</jk> if unknown. * @param r The reader being parsed. * @param outer The outer object (for constructing nested inner classes). * @param isUrlParamValue * If <jk>true</jk>, then we're parsing a top-level URL-encoded value which is treated a bit different than the * default case. * @param pMeta The current bean property being parsed. * @return The parsed object. * @throws IOException Thrown by underlying stream. * @throws ParseException Malformed input encountered. * @throws ExecutableException Exception occurred on invoked constructor/method/field. */ public <T> T parseAnything(ClassMeta<?> eType, UonReader r, Object outer, boolean isUrlParamValue, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException { if (eType == null) eType = object(); ObjectSwap<T,Object> swap = (ObjectSwap<T,Object>)eType.getSwap(this); BuilderSwap<T,Object> builder = (BuilderSwap<T,Object>)eType.getBuilderSwap(this); ClassMeta<?> sType = null; if (builder != null) sType = builder.getBuilderClassMeta(this); else if (swap != null) sType = swap.getSwapClassMeta(this); else sType = eType; if (sType.isOptional()) return (T)Utils.opt(parseAnything(eType.getElementType(), r, outer, isUrlParamValue, pMeta)); setCurrentClass(sType); Object o = null; int c = r.peekSkipWs(); if (c == -1 || c == AMP) { // If parameter is blank and it's an array or collection, return an empty list. if (sType.isCollectionOrArray()) o = sType.newInstance(); else if (sType.isString() || sType.isObject()) o = ""; else if (sType.isPrimitive()) o = sType.getPrimitiveDefault(); // Otherwise, leave null. 
} else if (sType.isVoid()) { String s = parseString(r, isUrlParamValue); if (s != null) throw new ParseException(this, "Expected ''null'' for void value, but was ''{0}''.", s); } else if (sType.isObject()) { if (c == '(') { JsonMap m = new JsonMap(this); parseIntoMap(r, m, string(), object(), pMeta); o = cast(m, pMeta, eType); } else if (c == '@') { Collection l = new JsonList(this); o = parseIntoCollection(r, l, sType, isUrlParamValue, pMeta); } else { String s = parseString(r, isUrlParamValue); if (c != '\'') { if ("true".equals(s) || "false".equals(s)) o = Boolean.valueOf(s); else if (! "null".equals(s)) { if (isNumeric(s)) o = StringUtils.parseNumber(s, Number.class); else o = s; } } else { o = s; } } } else if (sType.isBoolean()) { o = parseBoolean(r); } else if (sType.isCharSequence()) { o = parseString(r, isUrlParamValue); } else if (sType.isChar()) { o = parseCharacter(parseString(r, isUrlParamValue)); } else if (sType.isNumber()) { o = parseNumber(r, (Class<? extends Number>)sType.getInnerClass()); } else if (sType.isMap()) { Map m = (sType.canCreateNewInstance(outer) ? (Map)sType.newInstance(outer) : newGenericMap(sType)); o = parseIntoMap(r, m, sType.getKeyType(), sType.getValueType(), pMeta); } else if (sType.isCollection()) { if (c == '(') { JsonMap m = new JsonMap(this); parseIntoMap(r, m, string(), object(), pMeta); // Handle case where it's a collection, but serialized as a map with a _type or _value key. if (m.containsKey(getBeanTypePropertyName(sType))) o = cast(m, pMeta, eType); // Handle case where it's a collection, but only a single value was specified. else { Collection l = ( sType.canCreateNewInstance(outer) ? (Collection)sType.newInstance(outer) : new JsonList(this) ); l.add(m.cast(sType.getElementType())); o = l; } } else { Collection l = ( sType.canCreateNewInstance(outer) ? 
(Collection)sType.newInstance(outer) : new JsonList(this) ); o = parseIntoCollection(r, l, sType, isUrlParamValue, pMeta); } } else if (builder != null) { BeanMap m = toBeanMap(builder.create(this, eType)); m = parseIntoBeanMap(r, m); o = m == null ? null : builder.build(this, m.getBean(), eType); } else if (sType.canCreateNewBean(outer)) { BeanMap m = newBeanMap(outer, sType.getInnerClass()); m = parseIntoBeanMap(r, m); o = m == null ? null : m.getBean(); } else if (sType.canCreateNewInstanceFromString(outer)) { String s = parseString(r, isUrlParamValue); if (s != null) o = sType.newInstanceFromString(outer, s); } else if (sType.isArray() || sType.isArgs()) { if (c == '(') { JsonMap m = new JsonMap(this); parseIntoMap(r, m, string(), object(), pMeta); // Handle case where it's an array, but serialized as a map with a _type or _value key. if (m.containsKey(getBeanTypePropertyName(sType))) o = cast(m, pMeta, eType); // Handle case where it's an array, but only a single value was specified. else { ArrayList l = Utils.listOfSize(1); l.add(m.cast(sType.getElementType())); o = toArray(sType, l); } } else { ArrayList l = (ArrayList)parseIntoCollection(r, list(), sType, isUrlParamValue, pMeta); o = toArray(sType, l); } } else if (c == '(') { // It could be a non-bean with _type attribute. JsonMap m = new JsonMap(this); parseIntoMap(r, m, string(), object(), pMeta); if (m.containsKey(getBeanTypePropertyName(sType))) o = cast(m, pMeta, eType); else if (sType.getProxyInvocationHandler() != null) o = newBeanMap(outer, sType.getInnerClass()).load(m).getBean(); else throw new ParseException(this, "Class ''{0}'' could not be instantiated. Reason: ''{1}''", sType.getInnerClass().getName(), sType.getNotABeanReason()); } else if (c == 'n') { r.read(); // NOSONAR - Intentional. parseNull(r); } else { throw new ParseException(this, "Class ''{0}'' could not be instantiated. 
Reason: ''{1}''", sType.getInnerClass().getName(), sType.getNotABeanReason()); } if (o == null && sType.isPrimitive()) o = sType.getPrimitiveDefault(); if (swap != null && o != null) o = unswap(swap, o, eType); if (outer != null) setParent(eType, o, outer); return (T)o; } private <K,V> Map<K,V> parseIntoMap(UonReader r, Map<K,V> m, ClassMeta<K> keyType, ClassMeta<V> valueType, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException { if (keyType == null) keyType = (ClassMeta<K>)string(); int c = r.read(); if (c == -1 || c == AMP) return null; if (c == 'n') return (Map<K,V>)parseNull(r); if (c != '(') throw new ParseException(this, "Expected '(' at beginning of object."); final int S1=1; // Looking for attrName start. final int S2=2; // Found attrName end, looking for =. final int S3=3; // Found =, looking for valStart. final int S4=4; // Looking for , or ) boolean isInEscape = false; int state = S1; K currAttr = null; while (c != -1 && c != AMP) { c = r.read(); if (! isInEscape) { if (state == S1) { if (c == ')') return m; if (Character.isWhitespace(c)) skipSpace(r); else { r.unread(); Object attr = parseAttr(r, decoding); currAttr = attr == null ? 
null : convertAttrToType(m, trim(attr.toString()), keyType); state = S2; c = 0; // Avoid isInEscape if c was '\' } } else if (state == S2) { if (c == EQ || c == '=') state = S3; else if (c == -1 || c == ',' || c == ')' || c == AMP) { if (currAttr == null) { // Value was '%00' r.unread(); return null; } m.put(currAttr, null); if (c == ')' || c == -1 || c == AMP) return m; state = S1; } } else if (state == S3) { if (c == -1 || c == ',' || c == ')' || c == AMP) { V value = convertAttrToType(m, "", valueType); m.put(currAttr, value); if (c == -1 || c == ')' || c == AMP) return m; state = S1; } else { V value = parseAnything(valueType, r.unread(), m, false, pMeta); setName(valueType, value, currAttr); m.put(currAttr, value); state = S4; c = 0; // Avoid isInEscape if c was '\' } } else if (state == S4) { if (c == ',') state = S1; else if (c == ')' || c == -1 || c == AMP) { return m; } } } isInEscape = isInEscape(c, r, isInEscape); } if (state == S1) throw new ParseException(this, "Could not find attribute name on object."); if (state == S2) throw new ParseException(this, "Could not find '=' following attribute name on object."); if (state == S3) throw new ParseException(this, "Dangling '=' found in object entry"); if (state == S4) throw new ParseException(this, "Could not find ')' marking end of object."); return null; // Unreachable. } private <E> Collection<E> parseIntoCollection(UonReader r, Collection<E> l, ClassMeta<E> type, boolean isUrlParamValue, BeanPropertyMeta pMeta) throws IOException, ParseException, ExecutableException { int c = r.readSkipWs(); if (c == -1 || c == AMP) return null; if (c == 'n') return (Collection<E>)parseNull(r); int argIndex = 0; // If we're parsing a top-level parameter, we're allowed to have comma-delimited lists outside parenthesis (e.g. "&foo=1,2,3&bar=a,b,c") // This is not allowed at lower levels since we use comma's as end delimiters. boolean isInParens = (c == '@'); if (! 
isInParens) { if (isUrlParamValue) r.unread(); else throw new ParseException(this, "Could not find '(' marking beginning of collection."); } else { r.read(); // NOSONAR - Intentional, we're skipping the '@' character. } if (isInParens) { final int S1=1; // Looking for starting of first entry. final int S2=2; // Looking for starting of subsequent entries. final int S3=3; // Looking for , or ) after first entry. int state = S1; while (c != -1 && c != AMP) { c = r.read(); if (state == S1 || state == S2) { if (c == ')') { if (state == S2) { l.add((E)parseAnything(type.isArgs() ? type.getArg(argIndex++) : type.getElementType(), r.unread(), l, false, pMeta)); r.read(); // NOSONAR - Intentional, we're skipping the ')' character. } return l; } else if (Character.isWhitespace(c)) { skipSpace(r); } else { l.add((E)parseAnything(type.isArgs() ? type.getArg(argIndex++) : type.getElementType(), r.unread(), l, false, pMeta)); state = S3; } } else if (state == S3) { if (c == ',') { state = S2; } else if (c == ')') { return l; } } } if (state == S1 || state == S2) throw new ParseException(this, "Could not find start of entry in array."); if (state == S3) throw new ParseException(this, "Could not find end of entry in array."); } else { final int S1=1; // Looking for starting of entry. final int S2=2; // Looking for , or & or END after first entry. int state = S1; while (c != -1 && c != AMP) { c = r.read(); if (state == S1) { if (Character.isWhitespace(c)) { skipSpace(r); } else { l.add((E)parseAnything(type.isArgs() ? type.getArg(argIndex++) : type.getElementType(), r.unread(), l, false, pMeta)); state = S2; } } else if (state == S2) { if (c == ',') { state = S1; } else if (Character.isWhitespace(c)) { skipSpace(r); } else if (c == AMP || c == -1) { r.unread(); return l; } } } } return null; // Unreachable. 
} private <T> BeanMap<T> parseIntoBeanMap(UonReader r, BeanMap<T> m) throws IOException, ParseException, ExecutableException { int c = r.readSkipWs(); if (c == -1 || c == AMP) return null; if (c == 'n') return (BeanMap<T>)parseNull(r); if (c != '(') throw new ParseException(this, "Expected '(' at beginning of object."); final int S1=1; // Looking for attrName start. final int S2=2; // Found attrName end, looking for =. final int S3=3; // Found =, looking for valStart. final int S4=4; // Looking for , or } boolean isInEscape = false; int state = S1; String currAttr = ""; mark(); try { while (c != -1 && c != AMP) { c = r.read(); if (! isInEscape) { if (state == S1) { if (c == ')' || c == -1 || c == AMP) { return m; } if (Character.isWhitespace(c)) skipSpace(r); else { r.unread(); mark(); currAttr = parseAttrName(r, decoding); if (currAttr == null) { // Value was '%00' return null; } state = S2; } } else if (state == S2) { if (c == EQ || c == '=') state = S3; else if (c == -1 || c == ',' || c == ')' || c == AMP) { m.put(currAttr, null); if (c == ')' || c == -1 || c == AMP) { return m; } state = S1; } } else if (state == S3) { if (c == -1 || c == ',' || c == ')' || c == AMP) { if (! currAttr.equals(getBeanTypePropertyName(m.getClassMeta()))) { BeanPropertyMeta pMeta = m.getPropertyMeta(currAttr); if (pMeta == null) { onUnknownProperty(currAttr, m, null); unmark(); } else { unmark(); Object value = convertToType("", pMeta.getClassMeta()); try { pMeta.set(m, currAttr, value); } catch (BeanRuntimeException e) { onBeanSetterException(pMeta, e); throw e; } } } if (c == -1 || c == ')' || c == AMP) return m; state = S1; } else { if (! 
currAttr.equals(getBeanTypePropertyName(m.getClassMeta()))) {
							// Regular property with a value:  parse the value and set it on the bean.
							BeanPropertyMeta pMeta = m.getPropertyMeta(currAttr);
							if (pMeta == null) {
								// Unknown property.  Parse the value generically so the reader stays in
								// sync, then let the session decide whether to ignore or fail.
								onUnknownProperty(currAttr, m, parseAnything(object(), r.unread(), m.getBean(false), false, null));
								unmark();
							} else {
								unmark();
								setCurrentProperty(pMeta);
								ClassMeta<?> cm = pMeta.getClassMeta();
								Object value = parseAnything(cm, r.unread(), m.getBean(false), false, pMeta);
								setName(cm, value, currAttr);
								try {
									pMeta.set(m, currAttr, value);
								} catch (BeanRuntimeException e) {
									onBeanSetterException(pMeta, e);
									throw e;
								}
								setCurrentProperty(null);
							}
						}
						state = S4;
					}
				} else if (state == S4) {
					// Looking for ',' (next attribute) or ')' (end of object).
					if (c == ',')
						state = S1;
					else if (c == ')' || c == -1 || c == AMP) {
						return m;
					}
				}
			}
			isInEscape = isInEscape(c, r, isInEscape);
		}
		// Input ended mid-object -- report which part of the entry was missing.
		if (state == S1)
			throw new ParseException(this, "Could not find attribute name on object.");
		if (state == S2)
			throw new ParseException(this, "Could not find '=' following attribute name on object.");
		if (state == S3)
			throw new ParseException(this, "Could not find value following '=' on object.");
		if (state == S4)
			throw new ParseException(this, "Could not find ')' marking end of object.");
	} finally {
		unmark();
	}
	return null; // Unreachable.
}

/*
 * Consumes the remainder of a "null" token (the leading 'n' has already been read)
 * and returns null.  Any other character sequence is a syntax error.
 */
private Object parseNull(UonReader r) throws IOException, ParseException {
	String s = parseString(r, false);
	if ("ull".equals(s))
		return null;
	throw new ParseException(this, "Unexpected character sequence: ''{0}''", s);
}

/**
 * Convenience method for parsing an attribute from the specified parser.
 *
 * @param r The reader.
 * @param encoded Whether the attribute is encoded.
 * @return The parsed object
 * @throws IOException Exception thrown by underlying stream.
 * @throws ParseException Attribute was malformed.
 */
protected final Object parseAttr(UonReader r, boolean encoded) throws IOException, ParseException {
	// Currently a straight delegation to parseAttrName(); kept as a separate
	// method for readability at call sites.
	Object attr;
	attr = parseAttrName(r, encoded);
	return attr;
}

/**
 * Parses an attribute name from the specified reader.
 *
 * @param r The reader.
 * @param encoded Whether the attribute is encoded.
 * @return The parsed attribute name.
 * @throws IOException Exception thrown by underlying stream.
 * @throws ParseException Attribute name was malformed.
 */
protected final String parseAttrName(UonReader r, boolean encoded) throws IOException, ParseException {

	// If string is of form 'xxx', we're looking for ' at the end.
	// Otherwise, we're looking for '&' or '=' or WS or -1 denoting the end of this string.

	int c = r.peekSkipWs();
	if (c == '\'')
		return parsePString(r);
	r.mark();
	boolean isInEscape = false;
	if (encoded) {
		// Decoding mode:  escaped '&'/'=' markers are converted back to the literal characters.
		while (c != -1) {
			c = r.read();
			if (! isInEscape) {
				if (c == AMP || c == EQ || c == -1 || Character.isWhitespace(c)) {
					if (c != -1)
						r.unread();
					String s = r.getMarked();
					// The literal token "null" maps to a Java null.
					return ("null".equals(s) ? null : s);
				}
			} else if (c == AMP)
				r.replace('&');
			else if (c == EQ)
				r.replace('=');
			isInEscape = isInEscape(c, r, isInEscape);
		}
	} else {
		while (c != -1) {
			c = r.read();
			if (! isInEscape) {
				if (c == '=' || c == -1 || Character.isWhitespace(c)) {
					if (c != -1)
						r.unread();
					String s = r.getMarked();
					return ("null".equals(s) ? null : trim(s));
				}
			}
			isInEscape = isInEscape(c, r, isInEscape);
		}
	}

	// We should never get here.
	throw new ParseException(this, "Unexpected condition.");
}

/*
 * Returns true if the next character in the stream is preceded by an escape '~' character.
 */
private static final boolean isInEscape(int c, ParserReader r, boolean prevIsInEscape) throws IOException {
	if (c == '~' && ! prevIsInEscape) {
		c = r.peek();
		if (escapedChars.contains(c)) {
			// Remove the '~' from the buffer so the escaped character is kept literally.
			r.delete();
			return true;
		}
	}
	return false;
}

/**
 * Parses a string value from the specified reader.
 *
 * @param r The input reader.
 * @param isUrlParamValue Whether this is a URL parameter.
 * @return The parsed string.
 * @throws IOException Exception thrown by underlying stream.
 * @throws ParseException Malformed input found.
 */
protected final String parseString(UonReader r, boolean isUrlParamValue) throws IOException, ParseException {

	// If string is of form 'xxx', we're looking for ' at the end.
	// Otherwise, we're looking for ',' or ')' or -1 denoting the end of this string.

	int c = r.peekSkipWs();
	if (c == '\'')
		return parsePString(r);

	r.mark();
	boolean isInEscape = false;
	String s = null;
	// End-of-string delimiters differ for top-level URL parameter values vs. nested values.
	AsciiSet endChars = (isUrlParamValue ? endCharsParam : endCharsNormal);
	while (c != -1) {
		c = r.read();
		if (! isInEscape) {
			// If this is a URL parameter value, we're looking for: &
			// If not, we're looking for: &,)
			if (endChars.contains(c)) {
				r.unread();
				c = -1;
			}
		}
		if (c == -1)
			s = r.getMarked();
		else if (c == EQ)
			r.replace('=');
		else if (Character.isWhitespace(c) && ! isUrlParamValue) {
			// Unquoted whitespace terminates a nested string; the trailing space is excluded.
			s = r.getMarked(0, -1);
			skipSpace(r);
			c = -1;
		}
		isInEscape = isInEscape(c, r, isInEscape);
	}
	if (isUrlParamValue)
		s = StringUtils.trim(s);
	// The literal token "null" maps to a Java null.
	return ("null".equals(s) ? null : trim(s));
}

// Delimiters that terminate an unquoted string value (see parseString()).
private static final AsciiSet endCharsParam = AsciiSet.of(""+AMP), endCharsNormal = AsciiSet.of(",)"+AMP);

/*
 * Parses a string of the form "'foo'"
 * All whitespace within parenthesis are preserved.
 */
private String parsePString(UonReader r) throws IOException, ParseException {
	r.read(); // Skip first quote, NOSONAR - Intentional.
	r.mark();
	int c = 0;
	boolean isInEscape = false;
	while (c != -1) {
		c = r.read();
		if (! isInEscape) {
			// Closing quote ends the string; the quote itself is excluded from the result.
			if (c == '\'')
				return trim(r.getMarked(0, -1));
		}
		if (c == EQ)
			r.replace('=');
		isInEscape = isInEscape(c, r, isInEscape);
	}
	throw new ParseException(this, "Unmatched parenthesis");
}

/*
 * Parses "true"/"false" (case-insensitive) into a Boolean.
 * A null string or the literal token "null" yields null.
 */
private Boolean parseBoolean(UonReader r) throws IOException, ParseException {
	String s = parseString(r, false);
	if (s == null || s.equals("null"))
		return null;
	if (s.equalsIgnoreCase("true"))
		return true;
	if (s.equalsIgnoreCase("false"))
		return false;
	throw new ParseException(this, "Unrecognized syntax for boolean. ''{0}''.", s);
}

private Number parseNumber(UonReader r, Class<?
extends Number> c) throws IOException, ParseException {
	// Parses a number token of the requested type; a null string yields null.
	String s = parseString(r, false);
	if (s == null)
		return null;
	return StringUtils.parseNumber(s, c);
}

/*
 * Call this method after you've finished a parsing a string to make sure that if there's any
 * remainder in the input, that it consists only of whitespace and comments.
 */
private void validateEnd(UonReader r) throws IOException, ParseException {
	if (! isValidateEnd())
		return;
	while (true) {
		int c = r.read();
		if (c == -1)
			return;
		if (! Character.isWhitespace(c))
			throw new ParseException(this, "Remainder after parse: ''{0}''.", (char)c);
	}
}

/*
 * Advances the reader past whitespace, leaving it positioned on the first
 * non-whitespace character.
 * NOTE(review): characters <= 2 also stop the scan even if they test as
 * whitespace -- presumably reserved internal marker characters; confirm.
 */
private static void skipSpace(ParserReader r) throws IOException {
	int c = 0;
	while ((c = r.read()) != -1) {
		if (c <= 2 || ! Character.isWhitespace(c)) {
			r.unread();
			return;
		}
	}
}

/**
 * Creates a {@link UonReader} from the specified parser pipe.
 *
 * @param pipe The parser input.
 * @param decodeChars Whether the reader should automatically decode URL-encoded characters.
 * @return A new {@link UonReader} object.
 * @throws IOException Thrown by underlying stream.
 */
public final UonReader getUonReader(ParserPipe pipe, boolean decodeChars) throws IOException {
	Reader r = pipe.getReader();
	// Reuse the reader if the pipe already wraps a UonReader.
	if (r instanceof UonReader)
		return (UonReader)r;
	return new UonReader(pipe, decodeChars);
}

//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------

/**
 * Decode <js>"%xx"</js> sequences.
 *
 * @see UonParser.Builder#decoding()
 * @return
 * 	<jk>true</jk> if URI encoded characters should be decoded, <jk>false</jk> if they've already been decoded
 * 	before being passed to this parser.
 */
protected final boolean isDecoding() {
	return decoding;
}

/**
 * Validate end.
* * @see UonParser.Builder#validateEnd() * @return * <jk>true</jk> if after parsing a POJO from the input, verifies that the remaining input in * the stream consists of only comments or whitespace. */ protected final boolean isValidateEnd() { return ctx.isValidateEnd(); } //----------------------------------------------------------------------------------------------------------------- // Other methods //----------------------------------------------------------------------------------------------------------------- @Override /* ContextSession */ protected JsonMap properties() { return filteredMap("decoding", decoding); } }
// ==== NOTE(review): extraction artifact -- a second source file begins below ====
// repo: apache/nutch  (size: 35,396 bytes)
// path: src/java/org/apache/nutch/fetcher/FetcherThread.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nutch.fetcher; import java.io.IOException; import java.lang.invoke.MethodHandles; import java.net.MalformedURLException; import java.net.URL; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map.Entry; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.util.StringUtils; import org.apache.nutch.crawl.CrawlDatum; import org.apache.nutch.crawl.NutchWritable; import org.apache.nutch.crawl.SignatureFactory; import org.apache.nutch.fetcher.Fetcher.FetcherRun; import org.apache.nutch.fetcher.FetcherThreadEvent.PublishEventType; import org.apache.nutch.metadata.Metadata; import org.apache.nutch.metadata.Nutch; import org.apache.nutch.net.URLExemptionFilters; import org.apache.nutch.net.URLFilterException; import org.apache.nutch.net.URLFilters; import org.apache.nutch.net.URLNormalizers; import org.apache.nutch.net.protocols.ProtocolLogUtil; import org.apache.nutch.parse.Outlink; import org.apache.nutch.parse.Parse; import 
org.apache.nutch.parse.ParseData; import org.apache.nutch.parse.ParseImpl; import org.apache.nutch.parse.ParseOutputFormat; import org.apache.nutch.parse.ParseResult; import org.apache.nutch.parse.ParseSegment; import org.apache.nutch.parse.ParseStatus; import org.apache.nutch.parse.ParseText; import org.apache.nutch.parse.ParseUtil; import org.apache.nutch.protocol.Content; import org.apache.nutch.protocol.Protocol; import org.apache.nutch.protocol.ProtocolFactory; import org.apache.nutch.protocol.ProtocolOutput; import org.apache.nutch.protocol.ProtocolStatus; import org.apache.nutch.scoring.ScoringFilterException; import org.apache.nutch.scoring.ScoringFilters; import org.apache.nutch.service.NutchServer; import org.apache.nutch.util.StringUtil; import org.apache.nutch.util.URLUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import crawlercommons.robots.BaseRobotRules; /** * This class picks items from queues and fetches the pages. */ public class FetcherThread extends Thread { private static final Logger LOG = LoggerFactory .getLogger(MethodHandles.lookup().lookupClass()); private Configuration conf; private URLFilters urlFilters; private URLExemptionFilters urlExemptionFilters; private ScoringFilters scfilters; private ParseUtil parseUtil; private URLNormalizers normalizers; private ProtocolFactory protocolFactory; private long maxCrawlDelay; private long minCrawlDelay; private String queueMode; private int maxRedirect; private boolean maxRedirectExceededSkip = false; private String reprUrl; private boolean redirecting; private int redirectCount; private boolean ignoreInternalLinks; private boolean ignoreExternalLinks; private boolean ignoreAlsoRedirects; private String ignoreExternalLinksMode; // Used by fetcher.follow.outlinks.depth in parse private final int maxOutlinks; private final int maxOutlinkLength; private final int interval; private int maxOutlinkDepth; private int maxOutlinkDepthNumLinks; private boolean outlinksIgnoreExternal; 
URLFilters urlFiltersForOutlinks; URLNormalizers normalizersForOutlinks; private boolean skipTruncated; private boolean halted = false; private AtomicInteger activeThreads; private FetchItemQueues fetchQueues; private QueueFeeder feeder; private AtomicInteger spinWaiting; private AtomicLong lastRequestStart; private AtomicInteger errors; private String segmentName; private boolean parsing; private FetcherRun.Context context; private boolean storingContent; private boolean signatureWithoutParsing; private AtomicInteger pages; private AtomicLong bytes; private List<Content> robotsTxtContent = null; private long robotsDeferVisitsDelay; private int robotsDeferVisitsRetries; //Used by the REST service private FetchNode fetchNode; private boolean reportToNutchServer; //Used for publishing events private FetcherThreadPublisher publisher; private boolean activatePublisher; private ProtocolLogUtil logUtil = new ProtocolLogUtil(); public FetcherThread(Configuration conf, AtomicInteger activeThreads, FetchItemQueues fetchQueues, QueueFeeder feeder, AtomicInteger spinWaiting, AtomicLong lastRequestStart, FetcherRun.Context context, AtomicInteger errors, String segmentName, boolean parsing, boolean storingContent, AtomicInteger pages, AtomicLong bytes) { this.setDaemon(true); // don't hang JVM on exit this.setName("FetcherThread"); // use an informative name this.conf = conf; this.urlFilters = new URLFilters(conf); this.urlExemptionFilters = new URLExemptionFilters(conf); this.scfilters = new ScoringFilters(conf); this.parseUtil = new ParseUtil(conf); this.skipTruncated = conf.getBoolean(ParseSegment.SKIP_TRUNCATED, true); this.signatureWithoutParsing = conf.getBoolean("fetcher.signature", false); this.protocolFactory = new ProtocolFactory(conf); this.normalizers = new URLNormalizers(conf, URLNormalizers.SCOPE_FETCHER); this.maxCrawlDelay = conf.getInt("fetcher.max.crawl.delay", 30) * 1000; float crawlDelay = conf.getFloat("fetcher.server.delay", 1.0f); this.minCrawlDelay = 
(long) (conf.getFloat("fetcher.min.crawl.delay", crawlDelay) * 1000); this.activeThreads = activeThreads; this.fetchQueues = fetchQueues; this.feeder = feeder; this.spinWaiting = spinWaiting; this.lastRequestStart = lastRequestStart; this.context = context; this.errors = errors; this.segmentName = segmentName; this.parsing = parsing; this.storingContent = storingContent; this.pages = pages; this.bytes = bytes; this.logUtil.setConf(conf); // NUTCH-2413 Apply filters and normalizers on outlinks // when parsing only if configured if (parsing) { if (conf.getBoolean("parse.filter.urls", true)) this.urlFiltersForOutlinks = urlFilters; if (conf.getBoolean("parse.normalize.urls", true)) this.normalizersForOutlinks = new URLNormalizers(conf, URLNormalizers.SCOPE_OUTLINK); } // NUTCH-2573 defer visits if robots.txt fails with HTTP 5xx if (conf.getBoolean("http.robots.503.defer.visits", true)) { this.robotsDeferVisitsDelay = conf .getLong("http.robots.503.defer.visits.delay", 5 * 60 * 1000L); this.robotsDeferVisitsRetries = conf .getInt("http.robots.503.defer.visits.retries", 3); } if((activatePublisher=conf.getBoolean("fetcher.publisher", false))) this.publisher = new FetcherThreadPublisher(conf); queueMode = conf.get("fetcher.queue.mode", FetchItemQueues.QUEUE_MODE_HOST); queueMode = FetchItemQueues.checkQueueMode(queueMode); LOG.info("{} {} Using queue mode : {}", getName(), Thread.currentThread().getId(), queueMode); this.maxRedirect = conf.getInt("http.redirect.max", 3); this.maxRedirectExceededSkip = conf .getBoolean("http.redirect.max.exceeded.skip", false); int maxOutlinksPerPage = conf.getInt("db.max.outlinks.per.page", 100); maxOutlinks = (maxOutlinksPerPage < 0) ? Integer.MAX_VALUE : maxOutlinksPerPage; int maxOutlinkL = conf.getInt("db.max.outlink.length", 4096); maxOutlinkLength = (maxOutlinkL < 0) ? 
Integer.MAX_VALUE : maxOutlinkL; interval = conf.getInt("db.fetch.interval.default", 2592000); ignoreInternalLinks = conf.getBoolean("db.ignore.internal.links", false); ignoreExternalLinks = conf.getBoolean("db.ignore.external.links", false); ignoreAlsoRedirects = conf.getBoolean("db.ignore.also.redirects", true); ignoreExternalLinksMode = conf.get("db.ignore.external.links.mode", "byHost"); maxOutlinkDepth = conf.getInt("fetcher.follow.outlinks.depth", -1); outlinksIgnoreExternal = conf.getBoolean( "fetcher.follow.outlinks.ignore.external", false); maxOutlinkDepthNumLinks = conf.getInt( "fetcher.follow.outlinks.num.links", 4); if (conf.getBoolean("fetcher.store.robotstxt", false)) { if (storingContent) { robotsTxtContent = new LinkedList<>(); } else { LOG.warn( "{} {} Ignoring fetcher.store.robotstxt because not storing content (fetcher.store.content)!", getName(), Thread.currentThread().getId()); } } } @Override @SuppressWarnings("fallthrough") public void run() { activeThreads.incrementAndGet(); // count threads FetchItem fit = null; try { // checking for the server to be running and fetcher.parse to be true if (parsing && NutchServer.getInstance().isRunning()) reportToNutchServer = true; while (true) { // creating FetchNode for storing in FetchNodeDb if (reportToNutchServer) this.fetchNode = new FetchNode(); else this.fetchNode = null; // check whether must be stopped if (isHalted()) { LOG.debug("{} set to halted", getName()); fit = null; return; } fit = fetchQueues.getFetchItem(); if (fit == null) { if (feeder.isAlive() || fetchQueues.getTotalSize() > 0) { LOG.debug("{} spin-waiting ...", getName()); // spin-wait. 
spinWaiting.incrementAndGet(); try { Thread.sleep(500); } catch (Exception e) { } spinWaiting.decrementAndGet(); continue; } else { // all done, finish this thread LOG.info("{} {} has no more work available", getName(), Thread.currentThread().getId()); return; } } lastRequestStart.set(System.currentTimeMillis()); Text reprUrlWritable = (Text) fit.datum.getMetaData().get( Nutch.WRITABLE_REPR_URL_KEY); if (reprUrlWritable == null) { setReprUrl(fit.url.toString()); } else { setReprUrl(reprUrlWritable.toString()); } try { // fetch the page redirecting = false; redirectCount = 0; //Publisher event if(activatePublisher) { FetcherThreadEvent startEvent = new FetcherThreadEvent(PublishEventType.START, fit.getUrl().toString()); publisher.publish(startEvent, conf); } do { LOG.info("{} {} fetching {} (queue crawl delay={}ms)", getName(), Thread.currentThread().getId(), fit.url, fetchQueues.getFetchItemQueue(fit.queueID).crawlDelay); LOG.debug("redirectCount={}", redirectCount); redirecting = false; Protocol protocol = this.protocolFactory.getProtocol(fit.u); BaseRobotRules rules = protocol.getRobotRules(fit.url, fit.datum, robotsTxtContent); if (robotsTxtContent != null) { outputRobotsTxt(robotsTxtContent); robotsTxtContent.clear(); } if (rules.isDeferVisits()) { LOG.info("Defer visits for queue {} : {}", fit.queueID, fit.url); // retry the fetch item if (fetchQueues.timelimitExceeded()) { fetchQueues.finishFetchItem(fit, true); } else { fetchQueues.addFetchItem(fit); } // but check whether it's time to cancel the queue int killedURLs = fetchQueues.checkExceptionThreshold( fit.getQueueID(), this.robotsDeferVisitsRetries + 1, this.robotsDeferVisitsDelay); if (killedURLs != 0) { context .getCounter("FetcherStatus", "robots_defer_visits_dropped") .increment(killedURLs); } continue; } if (!rules.isAllowed(fit.url.toString())) { // unblock fetchQueues.finishFetchItem(fit, true); LOG.info("Denied by robots.txt: {}", fit.url); output(fit.url, fit.datum, null, 
ProtocolStatus.STATUS_ROBOTS_DENIED, CrawlDatum.STATUS_FETCH_GONE); context.getCounter("FetcherStatus", "robots_denied").increment(1); continue; } if (rules.getCrawlDelay() > 0) { if (rules.getCrawlDelay() > maxCrawlDelay && maxCrawlDelay >= 0) { // unblock fetchQueues.finishFetchItem(fit, true); LOG.info("Crawl-Delay for {} too long ({} ms), skipping", fit.url, rules.getCrawlDelay()); output(fit.url, fit.datum, null, ProtocolStatus.STATUS_ROBOTS_DENIED, CrawlDatum.STATUS_FETCH_GONE); context.getCounter("FetcherStatus", "robots_denied_maxcrawldelay").increment(1); continue; } else { FetchItemQueue fiq = fetchQueues.getFetchItemQueue(fit.queueID); long crawlDelay = rules.getCrawlDelay(); if (crawlDelay < minCrawlDelay) { LOG.info( "Crawl-Delay for {} too short ({} ms), adjusting to {} ms", fit.url, rules.getCrawlDelay(), minCrawlDelay); crawlDelay = minCrawlDelay; } fiq.crawlDelay = crawlDelay; LOG.debug( "Crawl delay for queue: {} is set to {} as per robots.txt. url: {}", fit.queueID, fiq.crawlDelay, fit.url); } } ProtocolOutput output = protocol.getProtocolOutput(fit.url, fit.datum); ProtocolStatus status = output.getStatus(); Content content = output.getContent(); ParseStatus pstatus = null; // unblock queue fetchQueues.finishFetchItem(fit); // used for FetchNode if (fetchNode != null) { fetchNode.setStatus(status.getCode()); fetchNode.setFetchTime(System.currentTimeMillis()); fetchNode.setUrl(fit.url); } //Publish fetch finish event if(activatePublisher) { FetcherThreadEvent endEvent = new FetcherThreadEvent(PublishEventType.END, fit.getUrl().toString()); endEvent.addEventData("status", status.getName()); publisher.publish(endEvent, conf); } context.getCounter("FetcherStatus", status.getName()).increment(1); switch (status.getCode()) { case ProtocolStatus.SUCCESS: // got a page pstatus = output(fit.url, fit.datum, content, status, CrawlDatum.STATUS_FETCH_SUCCESS, fit.outlinkDepth); updateStatus(content.getContent().length); if (pstatus != null && 
pstatus.isSuccess() && pstatus.getMinorCode() == ParseStatus.SUCCESS_REDIRECT) { String newUrl = pstatus.getMessage(); int refreshTime = Integer.parseInt(pstatus.getArgs()[1]); Text redirUrl = handleRedirect(fit, newUrl, refreshTime < Fetcher.PERM_REFRESH_TIME, Fetcher.CONTENT_REDIR); if (redirUrl != null) { fit = queueRedirect(redirUrl, fit); } } break; case ProtocolStatus.MOVED: // redirect case ProtocolStatus.TEMP_MOVED: int code; boolean temp; if (status.getCode() == ProtocolStatus.MOVED) { code = CrawlDatum.STATUS_FETCH_REDIR_PERM; temp = false; } else { code = CrawlDatum.STATUS_FETCH_REDIR_TEMP; temp = true; } output(fit.url, fit.datum, content, status, code); String newUrl = status.getMessage(); Text redirUrl = handleRedirect(fit, newUrl, temp, Fetcher.PROTOCOL_REDIR); if (redirUrl != null) { fit = queueRedirect(redirUrl, fit); } else { // stop redirecting redirecting = false; } break; case ProtocolStatus.EXCEPTION: logError(fit.url, status.getMessage()); int killedURLs = fetchQueues .checkExceptionThreshold(fit.getQueueID()); if (killedURLs != 0) context.getCounter("FetcherStatus", "AboveExceptionThresholdInQueue").increment(killedURLs); /* FALLTHROUGH */ case ProtocolStatus.RETRY: // retry output(fit.url, fit.datum, null, status, CrawlDatum.STATUS_FETCH_RETRY); break; case ProtocolStatus.GONE: // gone case ProtocolStatus.NOTFOUND: case ProtocolStatus.ACCESS_DENIED: case ProtocolStatus.ROBOTS_DENIED: output(fit.url, fit.datum, null, status, CrawlDatum.STATUS_FETCH_GONE); break; case ProtocolStatus.NOTMODIFIED: output(fit.url, fit.datum, null, status, CrawlDatum.STATUS_FETCH_NOTMODIFIED); break; default: LOG.warn("{} {} Unknown ProtocolStatus: {}", getName(), Thread.currentThread().getId(), status.getCode()); output(fit.url, fit.datum, null, status, CrawlDatum.STATUS_FETCH_RETRY); } if (redirecting && redirectCount > maxRedirect) { fetchQueues.finishFetchItem(fit); context.getCounter("FetcherStatus", "redirect_count_exceeded") .increment(1); LOG.info("{} {} 
- redirect count exceeded {} ({})", getName(), Thread.currentThread().getId(), fit.url, maxRedirectExceededSkip ? "skipped" : "linked"); if (maxRedirectExceededSkip) { // skip redirect target when redirect count is exceeded } else { Text newUrl = new Text(status.getMessage()); CrawlDatum newDatum = createRedirDatum(newUrl, fit, CrawlDatum.STATUS_LINKED); output(newUrl, newDatum, null, null, CrawlDatum.STATUS_LINKED); } } } while (redirecting && (redirectCount <= maxRedirect)); } catch (Throwable t) { // unexpected exception // unblock fetchQueues.finishFetchItem(fit); String message; if (LOG.isDebugEnabled()) { message = StringUtils.stringifyException(t); } else if (logUtil.logShort(t)) { message = t.getClass().getName(); } else { message = StringUtils.stringifyException(t); } logError(fit.url, message); output(fit.url, fit.datum, null, ProtocolStatus.STATUS_FAILED, CrawlDatum.STATUS_FETCH_RETRY); } } } catch (Throwable e) { LOG.error("fetcher caught:", e); } finally { if (fit != null) { fetchQueues.finishFetchItem(fit); } activeThreads.decrementAndGet(); // count threads LOG.info("{} {} -finishing thread {}, activeThreads={}", getName(), Thread.currentThread().getId(), getName(), activeThreads); } } private Text handleRedirect(FetchItem fit, String newUrl, boolean temp, String redirType) throws MalformedURLException, URLFilterException, InterruptedException { if (newUrl.length() > maxOutlinkLength) { return null; } newUrl = normalizers.normalize(newUrl, URLNormalizers.SCOPE_FETCHER); newUrl = urlFilters.filter(newUrl); String urlString = fit.url.toString(); if (newUrl == null || newUrl.equals(urlString)) { LOG.debug(" - {} redirect skipped: {}", redirType, (newUrl != null ? 
"to same url" : "filtered"));
      return null;
    }

    if (ignoreAlsoRedirects && (ignoreExternalLinks || ignoreInternalLinks)) {
      // Apply the internal/external link-ignoring rules to the redirect target
      // as well (controlled by db.ignore.also.redirects).
      try {
        URL origUrl = fit.u;
        URL redirUrl = new URL(newUrl);
        if (ignoreExternalLinks) {
          String origHostOrDomain, newHostOrDomain;
          if ("bydomain".equalsIgnoreCase(ignoreExternalLinksMode)) {
            origHostOrDomain = URLUtil.getDomainName(origUrl).toLowerCase();
            newHostOrDomain = URLUtil.getDomainName(redirUrl).toLowerCase();
          } else {
            // byHost
            origHostOrDomain = origUrl.getHost().toLowerCase();
            newHostOrDomain = redirUrl.getHost().toLowerCase();
          }
          if (!origHostOrDomain.equals(newHostOrDomain)) {
            LOG.debug(
                " - ignoring redirect {} from {} to {} because external links are ignored",
                redirType, urlString, newUrl);
            return null;
          }
        }
        if (ignoreInternalLinks) {
          String origHost = origUrl.getHost().toLowerCase();
          String newHost = redirUrl.getHost().toLowerCase();
          if (origHost.equals(newHost)) {
            LOG.debug(
                " - ignoring redirect {} from {} to {} because internal links are ignored",
                redirType, urlString, newUrl);
            return null;
          }
        }
      } catch (MalformedURLException e) {
        // Unparsable redirect target -- drop it silently.
        return null;
      }
    }
    reprUrl = URLUtil.chooseRepr(reprUrl, newUrl, temp);
    Text url = new Text(newUrl);
    if (maxRedirect > 0) {
      // Follow the redirect immediately in this thread (bounded by http.redirect.max).
      redirecting = true;
      redirectCount++;
      LOG.debug(" - {} redirect to {} (fetching now)", redirType, url);
      return url;
    } else {
      // Redirect following disabled:  record the target as a linked URL so it
      // can be picked up by a later crawl cycle.
      CrawlDatum newDatum = createRedirDatum(url, fit, CrawlDatum.STATUS_LINKED);
      output(url, newDatum, null, null, CrawlDatum.STATUS_LINKED);
      LOG.debug(" - {} redirect to {} (fetching later)", redirType, url);
      return null;
    }
  }

  /**
   * Creates the CrawlDatum for a redirect target with the given status,
   * copying fetch interval, score and metadata from the original fetch item
   * and applying the scoring filters' initial score.
   */
  private CrawlDatum createRedirDatum(Text redirUrl, FetchItem fit, byte status) {
    CrawlDatum newDatum = new CrawlDatum(status, fit.datum.getFetchInterval(),
        fit.datum.getScore());
    // transfer existing metadata
    newDatum.getMetaData().putAll(fit.datum.getMetaData());
    try {
      scfilters.initialScore(redirUrl, newDatum);
    } catch (ScoringFilterException e) {
      LOG.error("Scoring filtering failed for {}: ", redirUrl, e);
    }
    if (reprUrl != null) {
newDatum.getMetaData().put(Nutch.WRITABLE_REPR_URL_KEY,
          new Text(reprUrl));
    }
    return newDatum;
  }

  /**
   * Queues a redirect target for immediate fetching by this thread.
   *
   * Returns null (and clears the redirecting flag where applicable) when the
   * redirect is skipped: the target was queued recently (deduplication), the
   * fetch time limit was reached, or no FetchItem could be created.
   * Otherwise returns the new in-progress FetchItem for the redirect target.
   */
  private FetchItem queueRedirect(Text redirUrl, FetchItem fit)
      throws ScoringFilterException {
    if (fetchQueues.redirectIsQueuedRecently(redirUrl)) {
      redirecting = false;
      context.getCounter("FetcherStatus", "redirect_deduplicated").increment(1);
      LOG.debug(" - ignoring redirect from {} to {} as duplicate", fit.url,
          redirUrl);
      return null;
    } else if (fetchQueues.timelimitExceeded()) {
      redirecting = false;
      context.getCounter("FetcherStatus", "hitByTimeLimit").increment(1);
      LOG.debug(" - ignoring redirect from {} to {} - timelimit reached",
          fit.url, redirUrl);
      return null;
    }
    CrawlDatum newDatum = createRedirDatum(redirUrl, fit,
        CrawlDatum.STATUS_DB_UNFETCHED);
    fit = FetchItem.create(redirUrl, newDatum, queueMode);
    if (fit != null) {
      FetchItemQueue fiq = fetchQueues.getFetchItemQueue(fit.queueID);
      fiq.addInProgressFetchItem(fit);
    } else {
      // stop redirecting
      redirecting = false;
      context.getCounter("FetcherStatus", "FetchItem.notCreated.redirect")
          .increment(1);
    }
    return fit;
  }

  /** Logs a fetch failure and increments the shared error counter. */
  private void logError(Text url, String message) {
    LOG.info("{} {} fetch of {} failed with: {}", getName(),
        Thread.currentThread().getId(), url, message);
    errors.incrementAndGet();
  }

  /** Convenience overload of output() with outlinkDepth = 0. */
  private ParseStatus output(Text key, CrawlDatum datum, Content content,
      ProtocolStatus pstatus, int status) throws InterruptedException{
    return output(key, datum, content, pstatus, status, 0);
  }

  /**
   * Writes the fetch result (datum, and optionally content and parse output)
   * to the job context.  When this.parsing is enabled, the content is parsed
   * inline and the parse results are emitted as well.
   */
  private ParseStatus output(Text key, CrawlDatum datum, Content content,
      ProtocolStatus pstatus, int status, int outlinkDepth)
      throws InterruptedException{
    datum.setStatus(status);
    datum.setFetchTime(System.currentTimeMillis());
    if (pstatus != null)
      datum.getMetaData().put(Nutch.WRITABLE_PROTO_STATUS_KEY, pstatus);
    ParseResult parseResult = null;
    if (content != null) {
      Metadata metadata = content.getMetadata();
      // store the guessed content type in the crawldatum
      if (content.getContentType() != null)
        datum.getMetaData().put(new
Text(Metadata.CONTENT_TYPE), new Text(content.getContentType())); // add segment to metadata metadata.set(Nutch.SEGMENT_NAME_KEY, segmentName); // add score to content metadata so that ParseSegment can pick it up. try { scfilters.passScoreBeforeParsing(key, datum, content); } catch (Exception e) { LOG.warn("{} {} Couldn't pass score, url {} ({})", getName(), Thread.currentThread().getId(), key, e); } if (status == CrawlDatum.STATUS_FETCH_SUCCESS) { if (parsing && !(skipTruncated && ParseSegment.isTruncated(content))) { try { parseResult = this.parseUtil.parse(content); } catch (Exception e) { LOG.warn("{} {} Error parsing: {}: {}", getName(), Thread.currentThread().getId(), key, StringUtils.stringifyException(e)); } } if (parseResult == null && (parsing || signatureWithoutParsing)) { byte[] signature = SignatureFactory.getSignature(conf) .calculate(content, new ParseStatus().getEmptyParse(conf)); datum.setSignature(signature); } } /* * Store status code in content So we can read this value during parsing * (as a separate job) and decide to parse or not. */ content.getMetadata().add(Nutch.FETCH_STATUS_KEY, Integer.toString(status)); } try { context.write(key, new NutchWritable(datum)); if (content != null && storingContent) context.write(key, new NutchWritable(content)); if (parseResult != null) { for (Entry<Text, Parse> entry : parseResult) { Text url = entry.getKey(); Parse parse = entry.getValue(); ParseStatus parseStatus = parse.getData().getStatus(); ParseData parseData = parse.getData(); if (!parseStatus.isSuccess()) { LOG.warn("{} {} Error parsing: {}: {}", getName(), Thread.currentThread().getId(), key, parseStatus); parse = parseStatus.getEmptyParse(conf); } // Calculate page signature. 
For non-parsing fetchers this will // be done in ParseSegment byte[] signature = SignatureFactory.getSignature(conf) .calculate(content, parse); // Ensure segment name and score are in parseData metadata parseData.getContentMeta().set(Nutch.SEGMENT_NAME_KEY, segmentName); parseData.getContentMeta().set(Nutch.SIGNATURE_KEY, StringUtil.toHexString(signature)); // Pass fetch time to content meta parseData.getContentMeta().set(Nutch.FETCH_TIME_KEY, Long.toString(datum.getFetchTime())); if (url.equals(key)) datum.setSignature(signature); try { scfilters.passScoreAfterParsing(url, content, parse); } catch (Exception e) { LOG.warn("{} {} Couldn't pass score, url {} ({})", getName(), Thread.currentThread().getId(), key, e); } String origin = null; // collect outlinks for subsequent db update Outlink[] links = parseData.getOutlinks(); int outlinksToStore = Math.min(maxOutlinks, links.length); if (ignoreExternalLinks || ignoreInternalLinks) { URL originURL = new URL(url.toString()); // based on domain? 
if ("bydomain".equalsIgnoreCase(ignoreExternalLinksMode)) { origin = URLUtil.getDomainName(originURL).toLowerCase(); } // use host else { origin = originURL.getHost().toLowerCase(); } } //used by fetchNode if(fetchNode!=null){ fetchNode.setOutlinks(links); fetchNode.setTitle(parseData.getTitle()); FetchNodeDb.getInstance().put(fetchNode.getUrl().toString(), fetchNode); } int validCount = 0; // Process all outlinks, normalize, filter and deduplicate List<Outlink> outlinkList = new ArrayList<>(outlinksToStore); HashSet<String> outlinks = new HashSet<>(outlinksToStore); for (int i = 0; i < links.length && validCount < outlinksToStore; i++) { String toUrl = links[i].getToUrl(); if (toUrl.length() > maxOutlinkLength) { continue; } toUrl = ParseOutputFormat.filterNormalize(url.toString(), toUrl, origin, ignoreInternalLinks, ignoreExternalLinks, ignoreExternalLinksMode, urlFiltersForOutlinks, urlExemptionFilters, normalizersForOutlinks); if (toUrl == null) { continue; } validCount++; links[i].setUrl(toUrl); outlinkList.add(links[i]); outlinks.add(toUrl); } //Publish fetch report event if(activatePublisher) { FetcherThreadEvent reportEvent = new FetcherThreadEvent(PublishEventType.REPORT, url.toString()); reportEvent.addOutlinksToEventData(outlinkList); reportEvent.addEventData(Nutch.FETCH_EVENT_TITLE, parseData.getTitle()); reportEvent.addEventData(Nutch.FETCH_EVENT_CONTENTTYPE, parseData.getContentMeta().get("content-type")); reportEvent.addEventData(Nutch.FETCH_EVENT_SCORE, datum.getScore()); reportEvent.addEventData(Nutch.FETCH_EVENT_FETCHTIME, datum.getFetchTime()); reportEvent.addEventData(Nutch.FETCH_EVENT_CONTENTLANG, parseData.getContentMeta().get("content-language")); publisher.publish(reportEvent, conf); } // Only process depth N outlinks if (maxOutlinkDepth > 0 && outlinkDepth < maxOutlinkDepth && !fetchQueues.timelimitExceeded()) { FetchItem ft = FetchItem.create(url, null, queueMode); FetchItemQueue queue = fetchQueues.getFetchItemQueue(ft.queueID); 
queue.alreadyFetched.add(url.toString().hashCode()); context.getCounter("FetcherOutlinks", "outlinks_detected").increment( outlinks.size()); // Counter to limit num outlinks to follow per page int outlinkCounter = 0; String followUrl; // Walk over the outlinks and add as new FetchItem to the queues Iterator<String> iter = outlinks.iterator(); while (iter.hasNext() && outlinkCounter < maxOutlinkDepthNumLinks) { followUrl = iter.next(); // Check whether we'll follow external outlinks if (outlinksIgnoreExternal) { if (!URLUtil.getHost(url.toString()).equals( URLUtil.getHost(followUrl))) { continue; } } // Already followed? int urlHashCode = followUrl.hashCode(); if (queue.alreadyFetched.contains(urlHashCode)) { continue; } queue.alreadyFetched.add(urlHashCode); // Create new FetchItem with depth incremented FetchItem fit = FetchItem.create(new Text(followUrl), new CrawlDatum(CrawlDatum.STATUS_LINKED, interval), queueMode, outlinkDepth + 1); context.getCounter("FetcherOutlinks", "outlinks_following").increment(1); fetchQueues.addFetchItem(fit); outlinkCounter++; } } // Overwrite the outlinks in ParseData with the normalized and // filtered set parseData.setOutlinks(outlinkList.toArray(new Outlink[outlinkList .size()])); context.write(url, new NutchWritable(new ParseImpl(new ParseText( parse.getText()), parseData, parse.isCanonical()))); } } } catch (IOException e) { LOG.error("fetcher caught:", e); } // return parse status (of the "original" URL if the ParseResult contains // multiple parses) which allows Fetcher to follow meta-redirects if (parseResult != null && !parseResult.isEmpty()) { Parse p = parseResult.get(content.getUrl()); if (p != null) { context.getCounter("ParserStatus", ParseStatus.majorCodes[p .getData().getStatus().getMajorCode()]).increment(1); return p.getData().getStatus(); } } return null; } private void outputRobotsTxt(List<Content> robotsTxtContent) throws InterruptedException { for (Content robotsTxt : robotsTxtContent) { LOG.debug("Fetched and 
stored robots.txt {}", robotsTxt.getUrl()); try { context.write(new Text(robotsTxt.getUrl()), new NutchWritable(robotsTxt)); } catch (IOException e) { LOG.error("Fetcher failed to store the robots.txt:", e); } } } private void updateStatus(int bytesInPage) throws IOException { pages.incrementAndGet(); bytes.addAndGet(bytesInPage); } public synchronized void setHalted(boolean halted) { this.halted = halted; } public synchronized boolean isHalted() { return halted; } public String getReprUrl() { return reprUrl; } private void setReprUrl(String urlString) { this.reprUrl = urlString; } }
googleapis/google-cloud-java
35,165
java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/Package.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/vulnerability.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1; /** * * * <pre> * Package is a generic definition of a package. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.Package} */ public final class Package extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.Package) PackageOrBuilder { private static final long serialVersionUID = 0L; // Use Package.newBuilder() to construct. 
private Package(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Package() { packageName_ = ""; cpeUri_ = ""; packageType_ = ""; packageVersion_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Package(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Package_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Package_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.Package.class, com.google.cloud.securitycenter.v1.Package.Builder.class); } public static final int PACKAGE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object packageName_ = ""; /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The packageName. */ @java.lang.Override public java.lang.String getPackageName() { java.lang.Object ref = packageName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageName_ = s; return s; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The bytes for packageName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPackageNameBytes() { java.lang.Object ref = packageName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CPE_URI_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object cpeUri_ = ""; /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The cpeUri. */ @java.lang.Override public java.lang.String getCpeUri() { java.lang.Object ref = cpeUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cpeUri_ = s; return s; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The bytes for cpeUri. */ @java.lang.Override public com.google.protobuf.ByteString getCpeUriBytes() { java.lang.Object ref = cpeUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cpeUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PACKAGE_TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object packageType_ = ""; /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The packageType. 
*/ @java.lang.Override public java.lang.String getPackageType() { java.lang.Object ref = packageType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageType_ = s; return s; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The bytes for packageType. */ @java.lang.Override public com.google.protobuf.ByteString getPackageTypeBytes() { java.lang.Object ref = packageType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PACKAGE_VERSION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object packageVersion_ = ""; /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The packageVersion. */ @java.lang.Override public java.lang.String getPackageVersion() { java.lang.Object ref = packageVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageVersion_ = s; return s; } } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The bytes for packageVersion. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPackageVersionBytes() { java.lang.Object ref = packageVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, packageName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cpeUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, cpeUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, packageType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, packageVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, packageName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cpeUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, cpeUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, packageType_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, packageVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1.Package)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.Package other = (com.google.cloud.securitycenter.v1.Package) obj; if (!getPackageName().equals(other.getPackageName())) return false; if (!getCpeUri().equals(other.getCpeUri())) return false; if (!getPackageType().equals(other.getPackageType())) return false; if (!getPackageVersion().equals(other.getPackageVersion())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PACKAGE_NAME_FIELD_NUMBER; hash = (53 * hash) + getPackageName().hashCode(); hash = (37 * hash) + CPE_URI_FIELD_NUMBER; hash = (53 * hash) + getCpeUri().hashCode(); hash = (37 * hash) + PACKAGE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getPackageType().hashCode(); hash = (37 * hash) + PACKAGE_VERSION_FIELD_NUMBER; hash = (53 * hash) + getPackageVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.Package parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Package parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); 
} public static com.google.cloud.securitycenter.v1.Package parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Package parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Package parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.Package parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Package parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.Package parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Package parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.Package parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.Package 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.Package parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.securitycenter.v1.Package prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Package is a generic definition of a package. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.Package} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.Package) com.google.cloud.securitycenter.v1.PackageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Package_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Package_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.Package.class, com.google.cloud.securitycenter.v1.Package.Builder.class); } // Construct using com.google.cloud.securitycenter.v1.Package.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; packageName_ = ""; cpeUri_ = ""; packageType_ = ""; packageVersion_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1.VulnerabilityProto .internal_static_google_cloud_securitycenter_v1_Package_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1.Package getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1.Package.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1.Package build() { com.google.cloud.securitycenter.v1.Package result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.securitycenter.v1.Package buildPartial() { com.google.cloud.securitycenter.v1.Package result = new com.google.cloud.securitycenter.v1.Package(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v1.Package result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.packageName_ = packageName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.cpeUri_ = cpeUri_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.packageType_ = packageType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.packageVersion_ = packageVersion_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1.Package) { return mergeFrom((com.google.cloud.securitycenter.v1.Package) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v1.Package other) { if (other == com.google.cloud.securitycenter.v1.Package.getDefaultInstance()) 
return this; if (!other.getPackageName().isEmpty()) { packageName_ = other.packageName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getCpeUri().isEmpty()) { cpeUri_ = other.cpeUri_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPackageType().isEmpty()) { packageType_ = other.packageType_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getPackageVersion().isEmpty()) { packageVersion_ = other.packageVersion_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { packageName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { cpeUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { packageType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { packageVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object packageName_ = ""; /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The packageName. 
*/ public java.lang.String getPackageName() { java.lang.Object ref = packageName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The bytes for packageName. */ public com.google.protobuf.ByteString getPackageNameBytes() { java.lang.Object ref = packageName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @param value The packageName to set. * @return This builder for chaining. */ public Builder setPackageName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } packageName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return This builder for chaining. */ public Builder clearPackageName() { packageName_ = getDefaultInstance().getPackageName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @param value The bytes for packageName to set. * @return This builder for chaining. 
*/ public Builder setPackageNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); packageName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object cpeUri_ = ""; /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The cpeUri. */ public java.lang.String getCpeUri() { java.lang.Object ref = cpeUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cpeUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The bytes for cpeUri. */ public com.google.protobuf.ByteString getCpeUriBytes() { java.lang.Object ref = cpeUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cpeUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @param value The cpeUri to set. * @return This builder for chaining. */ public Builder setCpeUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } cpeUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return This builder for chaining. */ public Builder clearCpeUri() { cpeUri_ = getDefaultInstance().getCpeUri(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @param value The bytes for cpeUri to set. 
* @return This builder for chaining. */ public Builder setCpeUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); cpeUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object packageType_ = ""; /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The packageType. */ public java.lang.String getPackageType() { java.lang.Object ref = packageType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The bytes for packageType. */ public com.google.protobuf.ByteString getPackageTypeBytes() { java.lang.Object ref = packageType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @param value The packageType to set. * @return This builder for chaining. */ public Builder setPackageType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } packageType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearPackageType() { packageType_ = getDefaultInstance().getPackageType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @param value The bytes for packageType to set. * @return This builder for chaining. */ public Builder setPackageTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); packageType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object packageVersion_ = ""; /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The packageVersion. */ public java.lang.String getPackageVersion() { java.lang.Object ref = packageVersion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageVersion_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The bytes for packageVersion. */ public com.google.protobuf.ByteString getPackageVersionBytes() { java.lang.Object ref = packageVersion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @param value The packageVersion to set. * @return This builder for chaining. 
*/ public Builder setPackageVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } packageVersion_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return This builder for chaining. */ public Builder clearPackageVersion() { packageVersion_ = getDefaultInstance().getPackageVersion(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @param value The bytes for packageVersion to set. * @return This builder for chaining. */ public Builder setPackageVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); packageVersion_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.Package) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.Package) private static final com.google.cloud.securitycenter.v1.Package DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.Package(); } public static com.google.cloud.securitycenter.v1.Package getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Package> PARSER = new com.google.protobuf.AbstractParser<Package>() { @java.lang.Override public Package parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Package> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Package> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1.Package getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,165
java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/Package.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v2/vulnerability.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v2; /** * * * <pre> * Package is a generic definition of a package. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.Package} */ public final class Package extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.Package) PackageOrBuilder { private static final long serialVersionUID = 0L; // Use Package.newBuilder() to construct. 
private Package(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Package() { packageName_ = ""; cpeUri_ = ""; packageType_ = ""; packageVersion_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Package(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_Package_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_Package_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.Package.class, com.google.cloud.securitycenter.v2.Package.Builder.class); } public static final int PACKAGE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object packageName_ = ""; /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The packageName. */ @java.lang.Override public java.lang.String getPackageName() { java.lang.Object ref = packageName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageName_ = s; return s; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The bytes for packageName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPackageNameBytes() { java.lang.Object ref = packageName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CPE_URI_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object cpeUri_ = ""; /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The cpeUri. */ @java.lang.Override public java.lang.String getCpeUri() { java.lang.Object ref = cpeUri_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cpeUri_ = s; return s; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The bytes for cpeUri. */ @java.lang.Override public com.google.protobuf.ByteString getCpeUriBytes() { java.lang.Object ref = cpeUri_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cpeUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PACKAGE_TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object packageType_ = ""; /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The packageType. 
*/ @java.lang.Override public java.lang.String getPackageType() { java.lang.Object ref = packageType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageType_ = s; return s; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The bytes for packageType. */ @java.lang.Override public com.google.protobuf.ByteString getPackageTypeBytes() { java.lang.Object ref = packageType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PACKAGE_VERSION_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object packageVersion_ = ""; /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The packageVersion. */ @java.lang.Override public java.lang.String getPackageVersion() { java.lang.Object ref = packageVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageVersion_ = s; return s; } } /** * * * <pre> * The version of the package. * </pre> * * <code>string package_version = 4;</code> * * @return The bytes for packageVersion. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPackageVersionBytes() { java.lang.Object ref = packageVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, packageName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cpeUri_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, cpeUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, packageType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, packageVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, packageName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(cpeUri_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, cpeUri_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, packageType_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(packageVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, packageVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v2.Package)) { return super.equals(obj); } com.google.cloud.securitycenter.v2.Package other = (com.google.cloud.securitycenter.v2.Package) obj; if (!getPackageName().equals(other.getPackageName())) return false; if (!getCpeUri().equals(other.getCpeUri())) return false; if (!getPackageType().equals(other.getPackageType())) return false; if (!getPackageVersion().equals(other.getPackageVersion())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PACKAGE_NAME_FIELD_NUMBER; hash = (53 * hash) + getPackageName().hashCode(); hash = (37 * hash) + CPE_URI_FIELD_NUMBER; hash = (53 * hash) + getCpeUri().hashCode(); hash = (37 * hash) + PACKAGE_TYPE_FIELD_NUMBER; hash = (53 * hash) + getPackageType().hashCode(); hash = (37 * hash) + PACKAGE_VERSION_FIELD_NUMBER; hash = (53 * hash) + getPackageVersion().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v2.Package parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.Package parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); 
} public static com.google.cloud.securitycenter.v2.Package parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.Package parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.Package parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.Package parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.Package parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.Package parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.Package parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.Package parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.Package 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.Package parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.securitycenter.v2.Package prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Package is a generic definition of a package. 
* </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.Package} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.Package) com.google.cloud.securitycenter.v2.PackageOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_Package_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_Package_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.Package.class, com.google.cloud.securitycenter.v2.Package.Builder.class); } // Construct using com.google.cloud.securitycenter.v2.Package.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; packageName_ = ""; cpeUri_ = ""; packageType_ = ""; packageVersion_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v2.VulnerabilityProto .internal_static_google_cloud_securitycenter_v2_Package_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v2.Package getDefaultInstanceForType() { return com.google.cloud.securitycenter.v2.Package.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v2.Package build() { com.google.cloud.securitycenter.v2.Package result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.securitycenter.v2.Package buildPartial() { com.google.cloud.securitycenter.v2.Package result = new com.google.cloud.securitycenter.v2.Package(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.securitycenter.v2.Package result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.packageName_ = packageName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.cpeUri_ = cpeUri_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.packageType_ = packageType_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.packageVersion_ = packageVersion_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v2.Package) { return mergeFrom((com.google.cloud.securitycenter.v2.Package) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v2.Package other) { if (other == com.google.cloud.securitycenter.v2.Package.getDefaultInstance()) 
return this; if (!other.getPackageName().isEmpty()) { packageName_ = other.packageName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getCpeUri().isEmpty()) { cpeUri_ = other.cpeUri_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPackageType().isEmpty()) { packageType_ = other.packageType_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getPackageVersion().isEmpty()) { packageVersion_ = other.packageVersion_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { packageName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { cpeUri_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { packageType_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { packageVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object packageName_ = ""; /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The packageName. 
*/ public java.lang.String getPackageName() { java.lang.Object ref = packageName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return The bytes for packageName. */ public com.google.protobuf.ByteString getPackageNameBytes() { java.lang.Object ref = packageName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @param value The packageName to set. * @return This builder for chaining. */ public Builder setPackageName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } packageName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @return This builder for chaining. */ public Builder clearPackageName() { packageName_ = getDefaultInstance().getPackageName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The name of the package where the vulnerability was detected. * </pre> * * <code>string package_name = 1;</code> * * @param value The bytes for packageName to set. * @return This builder for chaining. 
*/ public Builder setPackageNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); packageName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object cpeUri_ = ""; /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The cpeUri. */ public java.lang.String getCpeUri() { java.lang.Object ref = cpeUri_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); cpeUri_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return The bytes for cpeUri. */ public com.google.protobuf.ByteString getCpeUriBytes() { java.lang.Object ref = cpeUri_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); cpeUri_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @param value The cpeUri to set. * @return This builder for chaining. */ public Builder setCpeUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } cpeUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @return This builder for chaining. */ public Builder clearCpeUri() { cpeUri_ = getDefaultInstance().getCpeUri(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The CPE URI where the vulnerability was detected. * </pre> * * <code>string cpe_uri = 2;</code> * * @param value The bytes for cpeUri to set. 
* @return This builder for chaining. */ public Builder setCpeUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); cpeUri_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object packageType_ = ""; /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The packageType. */ public java.lang.String getPackageType() { java.lang.Object ref = packageType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); packageType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return The bytes for packageType. */ public com.google.protobuf.ByteString getPackageTypeBytes() { java.lang.Object ref = packageType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); packageType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @param value The packageType to set. * @return This builder for chaining. */ public Builder setPackageType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } packageType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Type of package, for example, os, maven, or go. * </pre> * * <code>string package_type = 3;</code> * * @return This builder for chaining. 
 */
  // ---------------------------------------------------------------------------------------------
  // NOTE(review): protoc-generated code for google.cloud.securitycenter.v2.Package.
  // Comments below were added for readability only; do NOT hand-edit the logic —
  // it is overwritten whenever the .proto is regenerated.
  // bitField0_ bits: 0x4 = package_type (field 3), 0x8 = package_version (field 4).
  // ---------------------------------------------------------------------------------------------
  public Builder clearPackageType() {
    // Reset to the proto3 default ("") and clear the presence bit for field 3.
    packageType_ = getDefaultInstance().getPackageType();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }

  /**
   * Sets {@code string package_type = 3} from raw bytes (must be valid UTF-8).
   *
   * @param value The bytes for packageType to set.
   * @return This builder for chaining.
   */
  public Builder setPackageTypeBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    packageType_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  // Stored as Object: either a String or a ByteString, lazily converted on access.
  private java.lang.Object packageVersion_ = "";

  /**
   * Returns {@code string package_version = 4}, caching the UTF-8 decode of a ByteString.
   *
   * @return The packageVersion.
   */
  public java.lang.String getPackageVersion() {
    java.lang.Object ref = packageVersion_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      packageVersion_ = s; // cache decoded form
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * Returns {@code package_version} as bytes, caching the UTF-8 encode of a String.
   *
   * @return The bytes for packageVersion.
   */
  public com.google.protobuf.ByteString getPackageVersionBytes() {
    java.lang.Object ref = packageVersion_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      packageVersion_ = b; // cache encoded form
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   * Sets {@code string package_version = 4}.
   *
   * @param value The packageVersion to set.
   * @return This builder for chaining.
   */
  public Builder setPackageVersion(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    packageVersion_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }

  /**
   * Clears {@code package_version} back to its default.
   *
   * @return This builder for chaining.
   */
  public Builder clearPackageVersion() {
    packageVersion_ = getDefaultInstance().getPackageVersion();
    bitField0_ = (bitField0_ & ~0x00000008);
    onChanged();
    return this;
  }

  /**
   * Sets {@code package_version} from raw bytes (must be valid UTF-8).
   *
   * @param value The bytes for packageVersion to set.
   * @return This builder for chaining.
   */
  public Builder setPackageVersionBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    packageVersion_ = value;
    bitField0_ |= 0x00000008;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.Package)
}

// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.Package)
// Singleton default instance shared by all readers of "empty" Package messages.
private static final com.google.cloud.securitycenter.v2.Package DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.Package();
}

public static com.google.cloud.securitycenter.v2.Package getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser delegates to Builder.mergeFrom and always attaches the partially-built
// message to any parse failure so callers can inspect what was decoded.
private static final com.google.protobuf.Parser<Package> PARSER =
    new com.google.protobuf.AbstractParser<Package>() {
      @java.lang.Override
      public Package parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<Package> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<Package> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.securitycenter.v2.Package getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googleapis/google-cloud-java
35,349
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/SummarizationSectionList.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/generator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;

/**
 * List of summarization sections.
 *
 * <p>Single repeated message field: {@code summarization_sections = 1} (Optional).
 * Immutable once built; use {@link #newBuilder()} to construct or modify.
 *
 * Protobuf type {@code google.cloud.dialogflow.v2beta1.SummarizationSectionList}
 */
public final class SummarizationSectionList extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.SummarizationSectionList)
    SummarizationSectionListOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use SummarizationSectionList.newBuilder() to construct.
  private SummarizationSectionList(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private SummarizationSectionList() {
    summarizationSections_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SummarizationSectionList();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2beta1.GeneratorProto
        .internal_static_google_cloud_dialogflow_v2beta1_SummarizationSectionList_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2beta1.GeneratorProto
        .internal_static_google_cloud_dialogflow_v2beta1_SummarizationSectionList_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.class,
            com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.Builder.class);
  }

  public static final int SUMMARIZATION_SECTIONS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.dialogflow.v2beta1.SummarizationSection>
      summarizationSections_;

  // ---- Accessors for repeated field summarization_sections = 1 [OPTIONAL] ----

  @java.lang.Override
  public java.util.List<com.google.cloud.dialogflow.v2beta1.SummarizationSection>
      getSummarizationSectionsList() {
    return summarizationSections_;
  }

  @java.lang.Override
  public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder>
      getSummarizationSectionsOrBuilderList() {
    return summarizationSections_;
  }

  @java.lang.Override
  public int getSummarizationSectionsCount() {
    return summarizationSections_.size();
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.SummarizationSection getSummarizationSections(
      int index) {
    return summarizationSections_.get(index);
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder
      getSummarizationSectionsOrBuilder(int index) {
    return summarizationSections_.get(index);
  }

  // Memoized tri-state: -1 unknown, 0 false, 1 true. No required fields, so always true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < summarizationSections_.size(); i++) {
      output.writeMessage(1, summarizationSections_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < summarizationSections_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, summarizationSections_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.SummarizationSectionList)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.SummarizationSectionList other =
        (com.google.cloud.dialogflow.v2beta1.SummarizationSectionList) obj;
    if (!getSummarizationSectionsList().equals(other.getSummarizationSectionsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSummarizationSectionsCount() > 0) {
      hash = (37 * hash) + SUMMARIZATION_SECTIONS_FIELD_NUMBER;
      hash = (53 * hash) + getSummarizationSectionsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Standard protobuf parse entry points (delegate to PARSER) ----

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2beta1.SummarizationSectionList prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Builder for {@code google.cloud.dialogflow.v2beta1.SummarizationSectionList}.
   *
   * <p>bitField0_ bit 0x1 tracks whether summarizationSections_ is a private mutable copy.
   * Once summarizationSectionsBuilder_ is created, it owns the field and the plain list is unused.
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.SummarizationSectionList)
      com.google.cloud.dialogflow.v2beta1.SummarizationSectionListOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2beta1.GeneratorProto
          .internal_static_google_cloud_dialogflow_v2beta1_SummarizationSectionList_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2beta1.GeneratorProto
          .internal_static_google_cloud_dialogflow_v2beta1_SummarizationSectionList_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.class,
              com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (summarizationSectionsBuilder_ == null) {
        summarizationSections_ = java.util.Collections.emptyList();
      } else {
        summarizationSections_ = null;
        summarizationSectionsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2beta1.GeneratorProto
          .internal_static_google_cloud_dialogflow_v2beta1_SummarizationSectionList_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.SummarizationSectionList
        getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.SummarizationSectionList build() {
      com.google.cloud.dialogflow.v2beta1.SummarizationSectionList result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.SummarizationSectionList buildPartial() {
      com.google.cloud.dialogflow.v2beta1.SummarizationSectionList result =
          new com.google.cloud.dialogflow.v2beta1.SummarizationSectionList(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.dialogflow.v2beta1.SummarizationSectionList result) {
      if (summarizationSectionsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the mutable copy before handing it to the immutable message.
          summarizationSections_ = java.util.Collections.unmodifiableList(summarizationSections_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.summarizationSections_ = summarizationSections_;
      } else {
        result.summarizationSections_ = summarizationSectionsBuilder_.build();
      }
    }

    private void buildPartial0(
        com.google.cloud.dialogflow.v2beta1.SummarizationSectionList result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2beta1.SummarizationSectionList) {
        return mergeFrom((com.google.cloud.dialogflow.v2beta1.SummarizationSectionList) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.SummarizationSectionList other) {
      if (other
          == com.google.cloud.dialogflow.v2beta1.SummarizationSectionList.getDefaultInstance())
        return this;
      if (summarizationSectionsBuilder_ == null) {
        if (!other.summarizationSections_.isEmpty()) {
          if (summarizationSections_.isEmpty()) {
            // Share the other message's immutable list until a mutation requires a copy.
            summarizationSections_ = other.summarizationSections_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSummarizationSectionsIsMutable();
            summarizationSections_.addAll(other.summarizationSections_);
          }
          onChanged();
        }
      } else {
        if (!other.summarizationSections_.isEmpty()) {
          if (summarizationSectionsBuilder_.isEmpty()) {
            summarizationSectionsBuilder_.dispose();
            summarizationSectionsBuilder_ = null;
            summarizationSections_ = other.summarizationSections_;
            bitField0_ = (bitField0_ & ~0x00000001);
            summarizationSectionsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSummarizationSectionsFieldBuilder()
                    : null;
          } else {
            summarizationSectionsBuilder_.addAllMessages(other.summarizationSections_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: // field 1, wire type 2 (length-delimited message)
              {
                com.google.cloud.dialogflow.v2beta1.SummarizationSection m =
                    input.readMessage(
                        com.google.cloud.dialogflow.v2beta1.SummarizationSection.parser(),
                        extensionRegistry);
                if (summarizationSectionsBuilder_ == null) {
                  ensureSummarizationSectionsIsMutable();
                  summarizationSections_.add(m);
                } else {
                  summarizationSectionsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.dialogflow.v2beta1.SummarizationSection>
        summarizationSections_ = java.util.Collections.emptyList();

    // Copy-on-write: replace a shared/immutable list with a private ArrayList before mutating.
    private void ensureSummarizationSectionsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        summarizationSections_ =
            new java.util.ArrayList<com.google.cloud.dialogflow.v2beta1.SummarizationSection>(
                summarizationSections_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.SummarizationSection,
            com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder,
            com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder>
        summarizationSectionsBuilder_;

    // ---- Builder accessors for summarization_sections = 1 [OPTIONAL] ----

    public java.util.List<com.google.cloud.dialogflow.v2beta1.SummarizationSection>
        getSummarizationSectionsList() {
      if (summarizationSectionsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(summarizationSections_);
      } else {
        return summarizationSectionsBuilder_.getMessageList();
      }
    }

    public int getSummarizationSectionsCount() {
      if (summarizationSectionsBuilder_ == null) {
        return summarizationSections_.size();
      } else {
        return summarizationSectionsBuilder_.getCount();
      }
    }

    public com.google.cloud.dialogflow.v2beta1.SummarizationSection getSummarizationSections(
        int index) {
      if (summarizationSectionsBuilder_ == null) {
        return summarizationSections_.get(index);
      } else {
        return summarizationSectionsBuilder_.getMessage(index);
      }
    }

    public Builder setSummarizationSections(
        int index, com.google.cloud.dialogflow.v2beta1.SummarizationSection value) {
      if (summarizationSectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.set(index, value);
        onChanged();
      } else {
        summarizationSectionsBuilder_.setMessage(index, value);
      }
      return this;
    }

    public Builder setSummarizationSections(
        int index,
        com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder builderForValue) {
      if (summarizationSectionsBuilder_ == null) {
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.set(index, builderForValue.build());
        onChanged();
      } else {
        summarizationSectionsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    public Builder addSummarizationSections(
        com.google.cloud.dialogflow.v2beta1.SummarizationSection value) {
      if (summarizationSectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.add(value);
        onChanged();
      } else {
        summarizationSectionsBuilder_.addMessage(value);
      }
      return this;
    }

    public Builder addSummarizationSections(
        int index, com.google.cloud.dialogflow.v2beta1.SummarizationSection value) {
      if (summarizationSectionsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.add(index, value);
        onChanged();
      } else {
        summarizationSectionsBuilder_.addMessage(index, value);
      }
      return this;
    }

    public Builder addSummarizationSections(
        com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder builderForValue) {
      if (summarizationSectionsBuilder_ == null) {
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.add(builderForValue.build());
        onChanged();
      } else {
        summarizationSectionsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    public Builder addSummarizationSections(
        int index,
        com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder builderForValue) {
      if (summarizationSectionsBuilder_ == null) {
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.add(index, builderForValue.build());
        onChanged();
      } else {
        summarizationSectionsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    public Builder addAllSummarizationSections(
        java.lang.Iterable<? extends com.google.cloud.dialogflow.v2beta1.SummarizationSection>
            values) {
      if (summarizationSectionsBuilder_ == null) {
        ensureSummarizationSectionsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, summarizationSections_);
        onChanged();
      } else {
        summarizationSectionsBuilder_.addAllMessages(values);
      }
      return this;
    }

    public Builder clearSummarizationSections() {
      if (summarizationSectionsBuilder_ == null) {
        summarizationSections_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        summarizationSectionsBuilder_.clear();
      }
      return this;
    }

    public Builder removeSummarizationSections(int index) {
      if (summarizationSectionsBuilder_ == null) {
        ensureSummarizationSectionsIsMutable();
        summarizationSections_.remove(index);
        onChanged();
      } else {
        summarizationSectionsBuilder_.remove(index);
      }
      return this;
    }

    public com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder
        getSummarizationSectionsBuilder(int index) {
      return getSummarizationSectionsFieldBuilder().getBuilder(index);
    }

    public com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder
        getSummarizationSectionsOrBuilder(int index) {
      if (summarizationSectionsBuilder_ == null) {
        return summarizationSections_.get(index);
      } else {
        return summarizationSectionsBuilder_.getMessageOrBuilder(index);
      }
    }

    public java.util.List<
            ? extends com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder>
        getSummarizationSectionsOrBuilderList() {
      if (summarizationSectionsBuilder_ != null) {
        return summarizationSectionsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(summarizationSections_);
      }
    }

    public com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder
        addSummarizationSectionsBuilder() {
      return getSummarizationSectionsFieldBuilder()
          .addBuilder(
              com.google.cloud.dialogflow.v2beta1.SummarizationSection.getDefaultInstance());
    }

    public com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder
        addSummarizationSectionsBuilder(int index) {
      return getSummarizationSectionsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.dialogflow.v2beta1.SummarizationSection.getDefaultInstance());
    }

    public java.util.List<com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder>
        getSummarizationSectionsBuilderList() {
      return getSummarizationSectionsFieldBuilder().getBuilderList();
    }

    // Lazily creates the field builder; after this, the plain list is released.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2beta1.SummarizationSection,
            com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder,
            com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder>
        getSummarizationSectionsFieldBuilder() {
      if (summarizationSectionsBuilder_ == null) {
        summarizationSectionsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.dialogflow.v2beta1.SummarizationSection,
                com.google.cloud.dialogflow.v2beta1.SummarizationSection.Builder,
                com.google.cloud.dialogflow.v2beta1.SummarizationSectionOrBuilder>(
                summarizationSections_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        summarizationSections_ = null;
      }
      return summarizationSectionsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.SummarizationSectionList)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.SummarizationSectionList)
  private static final com.google.cloud.dialogflow.v2beta1.SummarizationSectionList
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.SummarizationSectionList();
  }

  public static com.google.cloud.dialogflow.v2beta1.SummarizationSectionList getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser attaches the partially-built message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<SummarizationSectionList> PARSER =
      new com.google.protobuf.AbstractParser<SummarizationSectionList>() {
        @java.lang.Override
        public SummarizationSectionList parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException()
                .setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SummarizationSectionList> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SummarizationSectionList> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2beta1.SummarizationSectionList getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/geode
35,022
geode-gfsh/src/integrationTest/java/org/apache/geode/management/internal/cli/commands/CreateRegionCommandIntegrationTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.management.internal.cli.commands;

import static org.apache.geode.cache.Region.SEPARATOR;
import static org.assertj.core.api.Assertions.assertThat;

import java.util.Arrays;
import java.util.stream.Collectors;

import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.EvictionAction;
import org.apache.geode.cache.EvictionAlgorithm;
import org.apache.geode.cache.EvictionAttributes;
import org.apache.geode.cache.ExpirationAction;
import org.apache.geode.cache.ExpirationAttributes;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.test.junit.categories.RegionsTest;
import org.apache.geode.test.junit.rules.GfshCommandRule;
import org.apache.geode.test.junit.rules.ServerStarterRule;

/**
 * Integration tests for the gfsh {@code create region} command: each test issues the command
 * against a single embedded server and asserts on gfsh success/error output.
 */
@Category({RegionsTest.class})
public class CreateRegionCommandIntegrationTest {
  // Common command prefix; tests append --name and further options.
  private static final String CREATE_REGION = "create region --type=REPLICATE ";

  // Stub listener/constraint classes referenced by fully-qualified name in command options.
  public static class TestCacheListener extends CacheListenerAdapter<Object, Object> {
  }

  public static class TestConstraint {
  }

  // One server (with JMX manager) shared by the whole class; it hosts a REPLICATE region
  // named "REPLICATED" that tests use as a template / colocation target.
  @ClassRule
  public static ServerStarterRule server =
      new ServerStarterRule().withJMXManager().withRegion(RegionShortcut.REPLICATE, "REPLICATED");

  @Rule
  public GfshCommandRule gfsh = new GfshCommandRule();

  @Before
  public void before() throws Exception {
    // Fresh gfsh connection per test via the server's JMX manager port.
    gfsh.connectAndVerify(server.getJmxPort(), GfshCommandRule.PortType.jmxManager);
  }

  @Test
  public void parentRegionDoesNotExist() {
    gfsh.executeAndAssertThat(CREATE_REGION + "--name=" + SEPARATOR + "A" + SEPARATOR + "B")
        .statusIsError()
        .containsOutput(
            "Parent region for \"" + SEPARATOR + "A" + SEPARATOR + "B\" does not exist");
  }

  @Test
  public void groupDoesNotExist() {
    gfsh.executeAndAssertThat(CREATE_REGION + "--name=" + SEPARATOR + "FOO --groups=unknown")
        .statusIsError()
        .containsOutput("Group(s) \"unknown\" are invalid");
  }

  @Test
  public void templateRegionDoesNotExist() {
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR + "FOO --template-region=" + SEPARATOR + "BAR")
        .statusIsError()
        .containsOutput("Template region " + SEPARATOR + "BAR does not exist");
  }

  @Test
  public void conflictingPartitionAttributesWithTemplate() {
    // Partition-only options must be rejected when the template is a replicate region.
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR
            + "FOO --template-region=REPLICATED --redundant-copies=2")
        .statusIsError().containsOutput(
            "can be used only for creating a Partitioned Region");
  }

  @Test
  public void conflictingPartitionAttributesWithShortCut() {
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --redundant-copies=2")
        .statusIsError().containsOutput("can be used only for creating a Partitioned Region");
  }

  @Test
  public void colocatedWithRegionDoesNotExist() {
    gfsh.executeAndAssertThat("create region --type=PARTITION --name=" + SEPARATOR
        + "FOO --colocated-with=" + SEPARATOR + "BAR")
        .statusIsError().containsOutput("Specify a valid region path for colocated-with");
  }

  @Test
  public void colocatedWithRegionIsNotPartitioned() {
    gfsh.executeAndAssertThat(
        "create region --type=PARTITION --name=" + SEPARATOR + "FOO --colocated-with="
            + SEPARATOR + "REPLICATED")
        .statusIsError()
        .containsOutput(
            "colocated-with \"" + SEPARATOR + "REPLICATED\" is not a Partitioned Region");
  }

  @Test
  public void negativeLocalMaxMemory() {
    gfsh.executeAndAssertThat(
        "create region --type=PARTITION --name=" + SEPARATOR + "FOO --local-max-memory=-1")
        .statusIsError().containsOutput("PartitionAttributes localMaxMemory must not be negative");
  }

  @Test
  public void zeroLocalMaxMemoryIsOK() {
    // Zero is a valid accessor configuration; clean up the created region afterwards.
    gfsh.executeAndAssertThat(
        "create region --type=PARTITION --name=" + SEPARATOR + "FOO --local-max-memory=0")
        .statusIsSuccess().containsOutput("Region \"" + SEPARATOR + "FOO\" created");

    gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess();
  }

  @Test
  public void negativeTotalMaxMemory() {
    gfsh.executeAndAssertThat(
        "create region --type=PARTITION --name=" + SEPARATOR + "FOO --total-max-memory=-1")
        .statusIsError().containsOutput("Total size of partition region must be > 0");
  }

  @Test
  public void zeroTotalMaxMemory() {
    // Unlike local-max-memory, total-max-memory must be strictly positive.
    gfsh.executeAndAssertThat(
        "create region --type=PARTITION --name=" + SEPARATOR + "FOO --total-max-memory=0")
        .statusIsError().containsOutput("Total size of partition region must be > 0");
  }

  @Test
  public void redundantCopies() {
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR + "FOO --type=PARTITION --redundant-copies=2")
        .statusIsSuccess().containsOutput("Region \"" + SEPARATOR + "FOO\" created");

    gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess();
  }

  @Test
  public void tooManyredundantCopies() {
    // Valid range is 0..3; 4 must be rejected.
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR + "FOO --type=PARTITION --redundant-copies=4")
        .statusIsError().containsOutput("redundant-copies \"4\" is not valid");
  }

  @Test
  public void keyConstraint() {
    // NOTE(review): method continues beyond this excerpt; "abc-def" is presumably an
    // invalid class name for --key-constraint — the expected assertion is out of view.
    gfsh.executeAndAssertThat(
        "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --key-constraint=abc-def")
.statusIsError().containsOutput("Specify a valid class name for key-constraint"); } @Test public void valueConstraint() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --value-constraint=abc-def") .statusIsError() .containsOutput("Specify a valid class name for value-constraint"); } @Test public void ifNotExistsIsIdempotent() { gfsh.executeAndAssertThat( "create region --if-not-exists --type=PARTITION --name=" + SEPARATOR + "FOO --local-max-memory=0") .statusIsSuccess().containsOutput("Region \"" + SEPARATOR + "FOO\" created"); gfsh.executeAndAssertThat( "create region --skip-if-exists --type=PARTITION --name=" + SEPARATOR + "FOO --local-max-memory=0") .statusIsSuccess() .containsOutput("Region " + SEPARATOR + "FOO already exists on these members: server."); gfsh.executeAndAssertThat( "create region --if-not-exists --type=PARTITION --name=" + SEPARATOR + "FOO --local-max-memory=0") .statusIsSuccess() .containsOutput("Region " + SEPARATOR + "FOO already exists on these members: server."); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void invalidCacheListener() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --cache-listener=abc-def") .statusIsError().containsOutput( "java.lang.IllegalArgumentException: Failed to convert 'abc-def' to type ClassName"); } @Test public void invalidCacheLoader() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --cache-loader=abc-def") .statusIsError().containsOutput( "java.lang.IllegalArgumentException: Failed to convert 'abc-def' to type ClassName"); } @Test public void invalidCacheWriter() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --cache-writer=abc-def") .statusIsError().containsOutput( "java.lang.IllegalArgumentException: Failed to convert 'abc-def' to type ClassName"); } @Test public void 
invalidGatewaySenders() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --gateway-sender-id=unknown") .statusIsError() .containsOutput("There are no GatewaySenders defined currently in the system"); } // TODO: Write test for invalid gateway name (gateways already need to exist). @Test public void invalidConcurrencyLevel() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --template-region=" + SEPARATOR + "REPLICATED --concurrency-level=-1") .statusIsError().containsOutput("Specify positive integer value for concurrency-level"); } @Test public void nonPersistentRegionWithdiskStore() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --disk-store=unknown") .statusIsError() .containsOutput("Only regions with persistence or overflow to disk can specify DiskStore"); } @Test public void nonPersistentTemplateWithdiskStore() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --template-region=" + SEPARATOR + "REPLICATED --disk-store=unknown") .statusIsError() .containsOutput("template-region region \"" + SEPARATOR + "REPLICATED\" is not persistent") .containsOutput("Only regions with persistence or overflow to disk can specify DiskStore"); } @Test public void nonPersistentReplicateOverflowRegionWithdiskStore() { gfsh.executeAndAssertThat( "create disk-store --name=DISKSTORE --dir=DISKSTORE --auto-compact=false" + " --compaction-threshold=99 --max-oplog-size=1 --allow-force-compaction=true") .statusIsSuccess() .doesNotContainOutput("Did not complete waiting"); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "OVERFLOW --type=REPLICATE_OVERFLOW" + " --eviction-action=overflow-to-disk --eviction-entry-count=1000 --disk-store=DISKSTORE") .statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "OVERFLOW").statusIsSuccess(); gfsh.executeAndAssertThat("destroy disk-store --name=DISKSTORE").statusIsSuccess(); } @Test 
public void nonPersistentPartitionOverflowRegionWithdiskStore() { gfsh.executeAndAssertThat( "create disk-store --name=DISKSTORE --dir=DISKSTORE --auto-compact=false" + " --compaction-threshold=99 --max-oplog-size=1 --allow-force-compaction=true") .statusIsSuccess() .doesNotContainOutput("Did not complete waiting"); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "OVERFLOW --type=PARTITION_OVERFLOW" + " --eviction-action=overflow-to-disk --eviction-entry-count=1000 --disk-store=DISKSTORE") .statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "OVERFLOW").statusIsSuccess(); gfsh.executeAndAssertThat("destroy disk-store --name=DISKSTORE").statusIsSuccess(); } @Test public void nonPersistentTemplateOverflowRegionWithdiskStore() { gfsh.executeAndAssertThat( "create disk-store --name=DISKSTORE --dir=DISKSTORE --auto-compact=false" + " --compaction-threshold=99 --max-oplog-size=1 --allow-force-compaction=true") .statusIsSuccess() .doesNotContainOutput("Did not complete waiting"); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "OVERFLOW --type=PARTITION_OVERFLOW" + " --eviction-action=overflow-to-disk --eviction-entry-count=1000 --disk-store=DISKSTORE") .statusIsSuccess(); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "TEMPLATE --template-region=" + SEPARATOR + "OVERFLOW --disk-store=DISKSTORE") .statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "TEMPLATE").statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "OVERFLOW").statusIsSuccess(); gfsh.executeAndAssertThat("destroy disk-store --name=DISKSTORE").statusIsSuccess(); } @Test public void invalidDiskStore() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE_PERSISTENT --disk-store=unknown") .statusIsError() .containsOutput("Specify valid disk-store. 
Unknown Disk Store : \"unknown\""); } @Test public void entryIdleTimeWithoutStatisticsEnabled() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --entry-idle-time-expiration=1") .statusIsError() .containsOutput("Statistics must be enabled for expiration"); } @Test public void invalidCompressor() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --compressor=java.lang.String") .statusIsError() .containsOutput("java.lang.String cannot be cast to ") .containsOutput("org.apache.geode.compression.Compressor"); } @Test public void validateDefaultExpirationAttributes() { gfsh.executeAndAssertThat("create region --name=" + SEPARATOR + "A --type=REPLICATE") .statusIsSuccess(); Region region = server.getCache().getRegion(SEPARATOR + "A"); RegionAttributes attributes = region.getAttributes(); ExpirationAttributes entryIdle = attributes.getEntryIdleTimeout(); ExpirationAttributes entryTTL = attributes.getEntryTimeToLive(); ExpirationAttributes regionIdle = attributes.getRegionIdleTimeout(); ExpirationAttributes regionTTL = attributes.getRegionTimeToLive(); assertThat(entryIdle).isNotNull(); assertThat(entryIdle.getTimeout()).isEqualTo(0); assertThat(entryIdle.getAction()).isEqualTo(ExpirationAction.INVALIDATE); assertThat(entryTTL).isNotNull(); assertThat(entryTTL.getTimeout()).isEqualTo(0); assertThat(entryTTL.getAction()).isEqualTo(ExpirationAction.INVALIDATE); assertThat(regionIdle).isNotNull(); assertThat(regionIdle.getTimeout()).isEqualTo(0); assertThat(regionIdle.getAction()).isEqualTo(ExpirationAction.INVALIDATE); assertThat(regionTTL).isNotNull(); assertThat(regionTTL.getTimeout()).isEqualTo(0); assertThat(regionTTL.getAction()).isEqualTo(ExpirationAction.INVALIDATE); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "A").statusIsSuccess(); } @Test public void validateNonDefaultBinaryOptions() { gfsh.executeAndAssertThat("create region --name=" + SEPARATOR + "FOO 
--type=REPLICATE" + " --enable-async-conflation" + " --enable-cloning" + " --enable-concurrency-checks=false" + " --enable-multicast" + " --enable-statistics" + " --enable-subscription-conflation" + " --enable-synchronous-disk=false").statusIsSuccess(); Region foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getEnableAsyncConflation()).isTrue(); assertThat(foo.getAttributes().getCloningEnabled()).isTrue(); assertThat(foo.getAttributes().getConcurrencyChecksEnabled()).isFalse(); assertThat(foo.getAttributes().getMulticastEnabled()).isTrue(); assertThat(foo.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(foo.getAttributes().getEnableSubscriptionConflation()).isTrue(); assertThat(foo.getAttributes().isDiskSynchronous()).isFalse(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void validateExpirationOptions() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE" + " --enable-statistics" + " --entry-idle-time-expiration=3" + " --entry-idle-time-expiration-action=DESTROY" + " --entry-time-to-live-expiration=5" + " --entry-time-to-live-expiration-action=DESTROY" + " --region-idle-time-expiration=7" + " --region-idle-time-expiration-action=DESTROY" + " --region-time-to-live-expiration=11" + " --region-time-to-live-expiration-action=DESTROY") .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(foo.getAttributes().getEntryIdleTimeout().getTimeout()).isEqualTo(3); assertThat(foo.getAttributes().getEntryIdleTimeout().getAction()) .isEqualTo(ExpirationAction.DESTROY); assertThat(foo.getAttributes().getEntryTimeToLive().getTimeout()).isEqualTo(5); assertThat(foo.getAttributes().getEntryTimeToLive().getAction()) .isEqualTo(ExpirationAction.DESTROY); 
assertThat(foo.getAttributes().getRegionIdleTimeout().getTimeout()).isEqualTo(7); assertThat(foo.getAttributes().getRegionIdleTimeout().getAction()) .isEqualTo(ExpirationAction.DESTROY); assertThat(foo.getAttributes().getRegionTimeToLive().getTimeout()).isEqualTo(11); assertThat(foo.getAttributes().getRegionTimeToLive().getAction()) .isEqualTo(ExpirationAction.DESTROY); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @SuppressWarnings("deprecation") @Test public void validatePartitionRegionOptions() { gfsh.executeAndAssertThat("create region --name=" + SEPARATOR + "FOO --type=PARTITION_REDUNDANT" + " --local-max-memory=1001" + " --recovery-delay=7" + " --redundant-copies=1" + " --startup-recovery-delay=5" + " --total-max-memory=2001" + " --total-num-buckets=11" + " --partition-resolver=" + TestPartitionResolver.class.getName()).statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getPartitionAttributes().getLocalMaxMemory()).isEqualTo(1001); assertThat(foo.getAttributes().getPartitionAttributes().getRecoveryDelay()).isEqualTo(7); assertThat(foo.getAttributes().getPartitionAttributes().getRedundantCopies()).isEqualTo(1); assertThat(foo.getAttributes().getPartitionAttributes().getStartupRecoveryDelay()).isEqualTo(5); assertThat(foo.getAttributes().getPartitionAttributes().getTotalMaxMemory()).isEqualTo(2001); assertThat(foo.getAttributes().getPartitionAttributes().getTotalNumBuckets()).isEqualTo(11); assertThat( foo.getAttributes().getPartitionAttributes().getPartitionResolver().getClass().getName()) .isEqualTo(TestPartitionResolver.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void validateCallbackOptions() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=PARTITION_REDUNDANT --cache-listener=" + TestCacheListener.class.getName() + " --cache-loader=" + 
TestCacheLoader.class.getName() + " --cache-writer=" + TestCacheWriter.class.getName() + " --compressor=" + TestCompressor.class.getName()) .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(Arrays.stream(foo.getAttributes().getCacheListeners()) .map(c -> c.getClass().getName()).collect(Collectors.toSet())) .contains(TestCacheListener.class.getName()); assertThat(foo.getAttributes().getCacheLoader().getClass().getName()) .isEqualTo(TestCacheLoader.class.getName()); assertThat(foo.getAttributes().getCacheWriter().getClass().getName()) .isEqualTo(TestCacheWriter.class.getName()); assertThat(foo.getAttributes().getCompressor().getClass().getName()) .isEqualTo(TestCompressor.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void validateConstraints() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE" + " --key-constraint=" + TestConstraint.class.getName() + " --value-constraint=" + TestConstraint.class.getName()) .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getKeyConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); assertThat(foo.getAttributes().getValueConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void validateEntryIdleTimeExpiration() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "FOO --type=REPLICATE --entry-idle-time-expiration=7 --enable-statistics") .statusIsSuccess(); Region<?, ?> template = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(template.getAttributes().getEntryIdleTimeout().getTimeout()).isEqualTo(7); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void 
validateTemplateRegionAttributesForReplicate() { gfsh.executeAndAssertThat("create region --name=" + SEPARATOR + "TEMPLATE --type=REPLICATE" + " --enable-async-conflation" + " --enable-cloning" + " --enable-concurrency-checks=false" + " --enable-multicast" + " --enable-statistics" + " --enable-subscription-conflation" + " --enable-synchronous-disk=false" + " --entry-idle-time-expiration=3" + " --entry-idle-time-expiration-action=DESTROY" + " --entry-time-to-live-expiration=5" + " --entry-time-to-live-expiration-action=DESTROY" + " --region-idle-time-expiration=7" + " --region-idle-time-expiration-action=DESTROY" + " --region-time-to-live-expiration=11" + " --region-time-to-live-expiration-action=DESTROY" + " --cache-listener=" + TestCacheListener.class.getName() + " --cache-loader=" + TestCacheLoader.class.getName() + " --cache-writer=" + TestCacheWriter.class.getName() + " --compressor=" + TestCompressor.class.getName() + " --key-constraint=" + TestConstraint.class.getName() + " --value-constraint=" + TestConstraint.class.getName()).statusIsSuccess(); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "COPY --template-region=" + SEPARATOR + "TEMPLATE") .statusIsSuccess(); Region<?, ?> copy = server.getCache().getRegion(SEPARATOR + "COPY"); assertThat(copy.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(copy.getAttributes().getEnableAsyncConflation()).isTrue(); assertThat(copy.getAttributes().getCloningEnabled()).isTrue(); assertThat(copy.getAttributes().getConcurrencyChecksEnabled()).isFalse(); assertThat(copy.getAttributes().getMulticastEnabled()).isTrue(); assertThat(copy.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(copy.getAttributes().getEnableSubscriptionConflation()).isTrue(); assertThat(copy.getAttributes().isDiskSynchronous()).isFalse(); assertThat(copy.getAttributes().getEntryIdleTimeout().getTimeout()).isEqualTo(3); assertThat(copy.getAttributes().getEntryIdleTimeout().getAction()) 
.isEqualTo(ExpirationAction.DESTROY); assertThat(copy.getAttributes().getEntryTimeToLive().getTimeout()).isEqualTo(5); assertThat(copy.getAttributes().getEntryTimeToLive().getAction()) .isEqualTo(ExpirationAction.DESTROY); assertThat(copy.getAttributes().getRegionIdleTimeout().getTimeout()).isEqualTo(7); assertThat(copy.getAttributes().getRegionIdleTimeout().getAction()) .isEqualTo(ExpirationAction.DESTROY); assertThat(copy.getAttributes().getRegionTimeToLive().getTimeout()).isEqualTo(11); assertThat(copy.getAttributes().getRegionTimeToLive().getAction()) .isEqualTo(ExpirationAction.DESTROY); assertThat(Arrays.stream(copy.getAttributes().getCacheListeners()) .map(c -> c.getClass().getName()).collect(Collectors.toSet())) .contains(TestCacheListener.class.getName()); assertThat(copy.getAttributes().getCacheLoader().getClass().getName()) .isEqualTo(TestCacheLoader.class.getName()); assertThat(copy.getAttributes().getCacheWriter().getClass().getName()) .isEqualTo(TestCacheWriter.class.getName()); assertThat(copy.getAttributes().getCompressor().getClass().getName()) .isEqualTo(TestCompressor.class.getName()); assertThat(copy.getAttributes().getKeyConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); assertThat(copy.getAttributes().getValueConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "COPY").statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "TEMPLATE").statusIsSuccess(); } @Test @SuppressWarnings("deprecation") public void validateTemplateRegionAttributesForPartitionRedundant() { gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "TEMPLATE --type=PARTITION_REDUNDANT" + " --enable-async-conflation" + " --enable-cloning" + " --enable-concurrency-checks=false" + " --enable-multicast" + " --enable-statistics" + " --enable-subscription-conflation" + " --enable-synchronous-disk=false" + " --cache-listener=" + 
TestCacheListener.class.getName() + " --cache-loader=" + TestCacheLoader.class.getName() + " --cache-writer=" + TestCacheWriter.class.getName() + " --compressor=" + TestCompressor.class.getName() + " --key-constraint=" + TestConstraint.class.getName() + " --value-constraint=" + TestConstraint.class.getName() + " --local-max-memory=1001" + " --recovery-delay=7" + " --redundant-copies=1" + " --startup-recovery-delay=5" + " --total-max-memory=2001" + " --total-num-buckets=11" + " --partition-resolver=" + TestPartitionResolver.class.getName()) .statusIsSuccess(); gfsh.executeAndAssertThat( "create region --name=" + SEPARATOR + "COPY --template-region=" + SEPARATOR + "TEMPLATE") .statusIsSuccess(); Region<?, ?> copy = server.getCache().getRegion(SEPARATOR + "COPY"); assertThat(copy.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(copy.getAttributes().getEnableAsyncConflation()).isTrue(); assertThat(copy.getAttributes().getCloningEnabled()).isTrue(); assertThat(copy.getAttributes().getConcurrencyChecksEnabled()).isFalse(); assertThat(copy.getAttributes().getMulticastEnabled()).isTrue(); assertThat(copy.getAttributes().getStatisticsEnabled()).isTrue(); assertThat(copy.getAttributes().getEnableSubscriptionConflation()).isTrue(); assertThat(copy.getAttributes().isDiskSynchronous()).isFalse(); assertThat(Arrays.stream(copy.getAttributes().getCacheListeners()) .map(c -> c.getClass().getName()).collect(Collectors.toSet())) .contains(TestCacheListener.class.getName()); assertThat(copy.getAttributes().getCacheLoader().getClass().getName()) .isEqualTo(TestCacheLoader.class.getName()); assertThat(copy.getAttributes().getCacheWriter().getClass().getName()) .isEqualTo(TestCacheWriter.class.getName()); assertThat(copy.getAttributes().getCompressor().getClass().getName()) .isEqualTo(TestCompressor.class.getName()); assertThat(copy.getAttributes().getKeyConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); 
assertThat(copy.getAttributes().getValueConstraint().getName()) .isEqualTo(TestConstraint.class.getName()); assertThat(copy.getAttributes().getPartitionAttributes().getLocalMaxMemory()).isEqualTo(1001); assertThat(copy.getAttributes().getPartitionAttributes().getRecoveryDelay()).isEqualTo(7); assertThat(copy.getAttributes().getPartitionAttributes().getRedundantCopies()).isEqualTo(1); assertThat(copy.getAttributes().getPartitionAttributes().getStartupRecoveryDelay()) .isEqualTo(5); assertThat(copy.getAttributes().getPartitionAttributes().getTotalMaxMemory()).isEqualTo(2001); assertThat(copy.getAttributes().getPartitionAttributes().getTotalNumBuckets()).isEqualTo(11); assertThat( copy.getAttributes().getPartitionAttributes().getPartitionResolver().getClass().getName()) .isEqualTo(TestPartitionResolver.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "COPY").statusIsSuccess(); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "TEMPLATE").statusIsSuccess(); } @Test public void cannotSetRegionExpirationForPartitionedRegion() { gfsh.executeAndAssertThat( "create region --enable-statistics=true --name=" + SEPARATOR + "FOO --type=PARTITION " + "--region-idle-time-expiration=1 --region-time-to-live-expiration=1") .statusIsError() .containsOutput( "ExpirationAction INVALIDATE or LOCAL_INVALIDATE for region is not supported for Partitioned Region"); } @Test public void testEvictionAttributesForLRUHeap() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-action=local-destroy") .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getEvictionAttributes().getAction()) .isEqualTo(EvictionAction.LOCAL_DESTROY); assertThat(foo.getAttributes().getEvictionAttributes().getAlgorithm()) .isEqualTo(EvictionAlgorithm.LRU_HEAP); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void 
testEvictionAttributesForLRUHeapWithObjectSizer() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-action=local-destroy --eviction-object-sizer=" + TestObjectSizer.class.getName()) .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getEvictionAttributes().getAction()) .isEqualTo(EvictionAction.LOCAL_DESTROY); assertThat(foo.getAttributes().getEvictionAttributes().getAlgorithm()) .isEqualTo(EvictionAlgorithm.LRU_HEAP); assertThat(foo.getAttributes().getEvictionAttributes().getObjectSizer().getClass().getName()) .isEqualTo(TestObjectSizer.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void testEvictionAttributesForLRUEntry() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-entry-count=1001 " + "--eviction-action=overflow-to-disk") .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getEvictionAttributes().getAction()) .isEqualTo(EvictionAction.OVERFLOW_TO_DISK); assertThat(foo.getAttributes().getEvictionAttributes().getAlgorithm()) .isEqualTo(EvictionAlgorithm.LRU_ENTRY); assertThat(foo.getAttributes().getEvictionAttributes().getMaximum()).isEqualTo(1001); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void testEvictionAttributesForLRUMemory() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-max-memory=1001 " + "--eviction-action=overflow-to-disk") .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); assertThat(foo.getAttributes().getEvictionAttributes().getAction()) .isEqualTo(EvictionAction.OVERFLOW_TO_DISK); assertThat(foo.getAttributes().getEvictionAttributes().getAlgorithm()) .isEqualTo(EvictionAlgorithm.LRU_MEMORY); 
assertThat(foo.getAttributes().getEvictionAttributes().getMaximum()).isEqualTo(1001); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void testEvictionAttributesForObjectSizer() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-max-memory=1001 " + "--eviction-action=overflow-to-disk --eviction-object-sizer=" + TestObjectSizer.class.getName()) .statusIsSuccess(); Region<?, ?> foo = server.getCache().getRegion(SEPARATOR + "FOO"); EvictionAttributes attrs = foo.getAttributes().getEvictionAttributes(); assertThat(attrs.getAction()).isEqualTo(EvictionAction.OVERFLOW_TO_DISK); assertThat(attrs.getAlgorithm()).isEqualTo(EvictionAlgorithm.LRU_MEMORY); assertThat(attrs.getMaximum()).isEqualTo(1001); assertThat(attrs.getObjectSizer().getClass().getName()) .isEqualTo(TestObjectSizer.class.getName()); gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "FOO").statusIsSuccess(); } @Test public void testEvictionAttributesForNonDeclarableObjectSizer() { gfsh.executeAndAssertThat( "create region --name=FOO --type=REPLICATE --eviction-max-memory=1001 " + "--eviction-action=overflow-to-disk --eviction-object-sizer=" + TestObjectSizerNotDeclarable.class.getName()) .statusIsError().containsOutput( "eviction-object-sizer must implement both ObjectSizer and Declarable interfaces"); } @Test public void createRegionWithCacheListenerWithInvalidJson() { gfsh.executeAndAssertThat("create region --name=FOO --type=REPLICATE --cache-listener=abc{abc}") .statusIsError().containsOutput("Invalid JSON: {abc}"); } @Test public void createSubRegion() { gfsh.executeAndAssertThat("create region --name=region --type=REPLICATE").statusIsSuccess(); gfsh.executeAndAssertThat( "create region --name=region" + SEPARATOR + "region1 --type=REPLICATE") .statusIsSuccess(); Region<?, ?> subregion = server.getCache().getRegion(SEPARATOR + "region" + SEPARATOR + "region1"); assertThat(subregion).isNotNull(); 
gfsh.executeAndAssertThat("destroy region --name=" + SEPARATOR + "region").statusIsSuccess(); } }
apache/hudi
35,495
hudi-client/hudi-spark-client/src/test/java/org/apache/hudi/index/bloom/TestHoodieBloomIndex.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hudi.index.bloom; import org.apache.hudi.client.functional.TestHoodieMetadataBase; import org.apache.hudi.common.bloom.BloomFilter; import org.apache.hudi.common.bloom.BloomFilterFactory; import org.apache.hudi.common.bloom.BloomFilterTypeCode; import org.apache.hudi.common.config.HoodieMetadataConfig; import org.apache.hudi.common.model.HoodieAvroRecord; import org.apache.hudi.common.model.HoodieEmptyRecord; import org.apache.hudi.common.model.HoodieFileGroupId; import org.apache.hudi.common.model.HoodieKey; import org.apache.hudi.common.model.HoodieRecord; import org.apache.hudi.common.model.WriteOperationType; import org.apache.hudi.common.table.HoodieTableMetaClient; import org.apache.hudi.common.util.Option; import org.apache.hudi.common.util.collection.ImmutablePair; import org.apache.hudi.common.util.collection.Pair; import org.apache.hudi.config.HoodieIndexConfig; import org.apache.hudi.config.HoodieWriteConfig; import org.apache.hudi.data.HoodieJavaPairRDD; import org.apache.hudi.index.HoodieIndex; import org.apache.hudi.index.HoodieIndexUtils; import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter; import org.apache.hudi.storage.StoragePath; import 
org.apache.hudi.table.HoodieSparkTable; import org.apache.hudi.table.HoodieTable; import org.apache.hudi.testutils.HoodieSparkWriteableTestTable; import org.apache.avro.Schema; import org.apache.spark.api.java.JavaPairRDD; import org.apache.spark.api.java.JavaRDD; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Random; import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; import scala.Tuple2; import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.genPseudoRandomUUID; import static org.apache.hudi.common.testutils.HoodieTestUtils.createSimpleRecord; import static org.apache.hudi.common.testutils.SchemaTestUtil.getSchemaFromResource; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class TestHoodieBloomIndex extends TestHoodieMetadataBase { private static final Schema SCHEMA = getSchemaFromResource(TestHoodieBloomIndex.class, "/exampleSchema.avsc", true); private static final String TEST_NAME_WITH_PARAMS = "[{index}] Test with rangePruning={0}, treeFiltering={1}, bucketizedChecking={2}, " + "useMetadataTable={3}, enableFileGroupIdKeySorting={4}"; private static final Random RANDOM = new Random(0xDEED); public static Stream<Arguments> configParams() { // 
rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting Object[][] data = new Object[][] { {true, true, true, false, false}, {false, true, true, false, false}, {true, true, false, false, false}, {true, false, true, false, false}, {true, true, true, true, false}, {false, true, true, true, false}, {true, true, false, true, false}, {true, false, true, true, false}, {true, true, false, false, true}, {true, false, false, false, true}, {false, false, false, false, true}, {false, true, false, false, true} }; return Stream.of(data).map(Arguments::of); } @BeforeEach public void setUp() throws Exception { initSparkContexts(); initPath(); initHoodieStorage(); // We have some records to be tagged (two different partitions) initMetaClient(); HoodieIndexConfig.Builder indexBuilder = HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.BLOOM); HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath) .withIndexConfig(indexBuilder.build()) .build(); writeClient = getHoodieWriteClient(config); } @AfterEach public void tearDown() throws Exception { cleanupResources(); } private HoodieWriteConfig makeConfig( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) { // For the bloom index to use column stats and bloom filters from metadata table, // the following configs must be set to true: // "hoodie.bloom.index.use.metadata" // "hoodie.metadata.enable" (by default is true) // "hoodie.metadata.index.column.stats.enable" // "hoodie.metadata.index.bloom.filter.enable" return HoodieWriteConfig.newBuilder().withPath(basePath) .withIndexConfig(HoodieIndexConfig.newBuilder() .bloomIndexPruneByRanges(rangePruning) .bloomIndexTreebasedFilter(treeFiltering) .bloomIndexBucketizedChecking(bucketizedChecking) .bloomIndexKeysPerBucket(2) .bloomIndexUseMetadata(useMetadataTable) .enableBloomIndexFileGroupIdKeySorting(enableFileGroupIdKeySorting) 
.build()) .withMetadataConfig(HoodieMetadataConfig.newBuilder() .withMetadataIndexBloomFilter(useMetadataTable) .withMetadataIndexColumnStats(useMetadataTable) .build()) .build(); } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testLoadInvolvedFiles( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) throws Exception { HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); HoodieBloomIndex index = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); HoodieTable hoodieTable = HoodieSparkTable.create(config, context, metaClient); metadataWriter = SparkHoodieBackedTableMetadataWriter.create(storageConf, config, context); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, metadataWriter, Option.of(context)); // Create some partitions, and put some files // "2016/01/21": 0 file // "2016/04/01": 1 file (2_0_20160401010101.parquet) // "2015/03/12": 3 files (1_0_20150312101010.parquet, 3_0_20150312101010.parquet, 4_0_20150312101010.parquet) testTable.withPartitionMetaFiles("2016/01/21", "2016/04/01", "2015/03/12"); HoodieRecord record1 = createSimpleRecord("000", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record2 = createSimpleRecord("001", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record3 = createSimpleRecord("002", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record4 = createSimpleRecord("003", "2016-01-31T03:16:41.415Z", 12); List<String> partitions = Arrays.asList("2016/01/21", "2016/04/01", "2015/03/12"); List<Pair<String, BloomIndexFileInfo>> filesList = index.loadColumnRangesFromFiles(partitions, context, hoodieTable); // Still 0, as no valid commit assertEquals(0, filesList.size()); final String fileId1 = "1"; final String fileId2 = "2"; final String fileId3 = "3"; final String fileId4 = 
"4"; final Map<String, List<Pair<String, Integer>>> partitionToFilesNameLengthMap = new HashMap<>(); String commitTime = "20160401010101"; StoragePath baseFilePath = testTable.forCommit(commitTime) .withInserts(partitions.get(1), fileId2, Collections.emptyList()); long baseFileLength = storage.getPathInfo(new StoragePath(baseFilePath.toUri())).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partitions.get(1), k -> new ArrayList<>()).add(Pair.of(fileId2, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commitTime, WriteOperationType.UPSERT, Arrays.asList(partitions.get(1)), partitionToFilesNameLengthMap, false, false); commitTime = "20150312101010"; partitionToFilesNameLengthMap.clear(); testTable.forCommit(commitTime); baseFilePath = testTable.withInserts(partitions.get(2), fileId1, Collections.emptyList()); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partitions.get(2), k -> new ArrayList<>()).add(Pair.of(fileId1, Integer.valueOf((int) baseFileLength))); baseFilePath = testTable.withInserts(partitions.get(2), fileId3, Collections.singletonList(record1)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partitions.get(2), k -> new ArrayList<>()).add(Pair.of(fileId3, Integer.valueOf((int) baseFileLength))); baseFilePath = testTable.withInserts(partitions.get(2), fileId4, Arrays.asList(record2, record3, record4)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partitions.get(2), k -> new ArrayList<>()).add(Pair.of(fileId4, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commitTime, WriteOperationType.UPSERT, Arrays.asList(partitions.get(2)), partitionToFilesNameLengthMap, false, false); filesList = index.loadColumnRangesFromFiles(partitions, context, hoodieTable); assertEquals(4, filesList.size()); if (rangePruning) { // these files 
will not have the key ranges assertNull(filesList.get(0).getRight().getMaxRecordKey()); assertNull(filesList.get(0).getRight().getMinRecordKey()); assertFalse(filesList.get(1).getRight().hasKeyRanges()); assertNotNull(filesList.get(2).getRight().getMaxRecordKey()); assertNotNull(filesList.get(2).getRight().getMinRecordKey()); assertTrue(filesList.get(3).getRight().hasKeyRanges()); // no longer sorted, but should have same files. List<ImmutablePair<String, BloomIndexFileInfo>> expected = Arrays.asList(new ImmutablePair<>("2016/04/01", new BloomIndexFileInfo("2")), new ImmutablePair<>("2015/03/12", new BloomIndexFileInfo("1")), new ImmutablePair<>("2015/03/12", new BloomIndexFileInfo("3", "000", "000")), new ImmutablePair<>("2015/03/12", new BloomIndexFileInfo("4", "001", "003"))); assertEquals(expected, filesList); } } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testRangePruning( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) { HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); HoodieBloomIndex index = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); final Map<String, List<BloomIndexFileInfo>> partitionToFileIndexInfo = new HashMap<>(); partitionToFileIndexInfo.put("2017/10/22", Arrays.asList(new BloomIndexFileInfo("f1"), new BloomIndexFileInfo("f2", "000", "000"), new BloomIndexFileInfo("f3", "001", "003"), new BloomIndexFileInfo("f4", "002", "007"), new BloomIndexFileInfo("f5", "009", "010"))); JavaPairRDD<String, String> partitionRecordKeyPairRDD = jsc.parallelize(Arrays.asList(new Tuple2<>("2017/10/22", "003"), new Tuple2<>("2017/10/22", "002"), new Tuple2<>("2017/10/22", "005"), new Tuple2<>("2017/10/22", "004"))).mapToPair(t -> t); List<Pair<HoodieFileGroupId, String>> comparisonKeyList = 
index.explodeRecordsWithFileComparisons(partitionToFileIndexInfo, HoodieJavaPairRDD.of(partitionRecordKeyPairRDD)).collectAsList(); assertEquals(10, comparisonKeyList.size()); Map<String, List<String>> recordKeyToFileComps = comparisonKeyList.stream() .collect( Collectors.groupingBy(t -> t.getRight(), Collectors.mapping(t -> t.getLeft().getFileId(), Collectors.toList()))); assertEquals(4, recordKeyToFileComps.size()); assertEquals(new HashSet<>(Arrays.asList("f1", "f3", "f4")), new HashSet<>(recordKeyToFileComps.get("002"))); assertEquals(new HashSet<>(Arrays.asList("f1", "f3", "f4")), new HashSet<>(recordKeyToFileComps.get("003"))); assertEquals(new HashSet<>(Arrays.asList("f1", "f4")), new HashSet<>(recordKeyToFileComps.get("004"))); assertEquals(new HashSet<>(Arrays.asList("f1", "f4")), new HashSet<>(recordKeyToFileComps.get("005"))); } @Test public void testCheckUUIDsAgainstOneFile() throws Exception { final String partition = "2016/01/31"; // Create some records to use HoodieRecord record1 = createSimpleRecord("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record2 = createSimpleRecord("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0", "2016-01-31T03:20:41.415Z", 100); HoodieRecord record3 = createSimpleRecord("3eb5b87c-1fej-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 15); HoodieRecord record4 = createSimpleRecord("4eb5b87c-1fej-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 32); // We write record1, record2 to a parquet file, but the bloom filter contains (record1, // record2, record3). 
BloomFilter filter = BloomFilterFactory.createBloomFilter(10000, 0.0000001, -1, BloomFilterTypeCode.SIMPLE.name()); filter.add(record3.getRecordKey()); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, filter, metadataWriter, Option.of(context)); final Map<String, List<Pair<String, Integer>>> partitionToFilesNameLengthMap = new HashMap<>(); final String commitTime = "0000001"; final String fileId = genRandomUUID(); StoragePath baseFilePath = testTable.forCommit(commitTime) .withInserts(partition, fileId, Arrays.asList(record1, record2)); long baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partition, k -> new ArrayList<>()).add(Pair.of(fileId, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commitTime, WriteOperationType.UPSERT, Collections.singletonList(partition), partitionToFilesNameLengthMap, false, false); final String filename = testTable.getBaseFileNameById(fileId); // The bloom filter contains 3 records assertTrue(filter.mightContain(record1.getRecordKey())); assertTrue(filter.mightContain(record2.getRecordKey())); assertTrue(filter.mightContain(record3.getRecordKey())); assertFalse(filter.mightContain(record4.getRecordKey())); // Compare with file List<String> uuids = Arrays.asList(record1.getRecordKey(), record2.getRecordKey(), record3.getRecordKey(), record4.getRecordKey()); HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).build(); List<Pair<String, Long>> results = HoodieIndexUtils.filterKeysFromFile( new StoragePath(Paths.get(basePath, partition, filename).toString()), uuids, storage); assertEquals(results.size(), 2); assertTrue(results.get(0).getLeft().equals("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0") || results.get(1).getLeft().equals("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0")); assertTrue(results.get(0).getLeft().equals("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0") || 
results.get(1).getLeft().equals("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0")); // TODO(vc): Need more coverage on actual filenames // assertTrue(results.get(0)._2().equals(filename)); // assertTrue(results.get(1)._2().equals(filename)); } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testTagLocationWithEmptyRDD( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) { // We have some records to be tagged (two different partitions) JavaRDD<HoodieRecord> recordRDD = jsc.emptyRDD(); // Also create the metadata and config HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); metaClient = HoodieTableMetaClient.reload(metaClient); HoodieSparkTable table = HoodieSparkTable.create(config, context, metaClient); // Let's tag HoodieBloomIndex bloomIndex = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); assertDoesNotThrow(() -> { tagLocation(bloomIndex, recordRDD, table); }, "EmptyRDD should not result in IllegalArgumentException: Positive number of slices required"); } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testTagLocationOnPartitionedTable( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) throws Exception { // We have some records to be tagged (two different partitions) String rowKey1 = genRandomUUID(); String rowKey2 = genRandomUUID(); String rowKey3 = genRandomUUID(); HoodieRecord record1 = createSimpleRecord(rowKey1, "2016-01-31T03:16:41.415Z", 12); HoodieRecord record2 = createSimpleRecord(rowKey2, "2016-01-31T03:20:41.415Z", 100); HoodieRecord record3 = createSimpleRecord(rowKey3, "2016-01-31T03:16:41.415Z", 15); // place same row key under a different partition. 
HoodieRecord record4 = createSimpleRecord(rowKey1, "2015-01-31T03:16:41.415Z", 32); JavaRDD<HoodieRecord> recordRDD = jsc.parallelize(Arrays.asList(record1, record2, record3, record4)); // Also create the metadata and config HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); HoodieSparkTable hoodieTable = HoodieSparkTable.create(config, context, metaClient); metadataWriter = SparkHoodieBackedTableMetadataWriter.create(storageConf, config, context); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, metadataWriter, Option.of(context)); // Let's tag HoodieBloomIndex bloomIndex = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); JavaRDD<HoodieRecord> taggedRecordRDD = tagLocation(bloomIndex, recordRDD, hoodieTable); // Should not find any files for (HoodieRecord record : taggedRecordRDD.collect()) { assertFalse(record.isCurrentLocationKnown()); } final Map<String, List<Pair<String, Integer>>> partitionToFilesNameLengthMap = new HashMap<>(); final String partition1 = "2016/01/31"; final String partition2 = "2015/01/31"; // We create three parquet file, each having one record. 
(two different partitions) final String fileId1 = genRandomUUID(); final String commit1 = "0000001"; StoragePath baseFilePath = testTable.forCommit(commit1) .withInserts(partition1, fileId1, Collections.singletonList(record1)); long baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partition1, k -> new ArrayList<>()).add(Pair.of(fileId1, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit1, WriteOperationType.UPSERT, Collections.singletonList(partition1), partitionToFilesNameLengthMap, false, false); final String fileId2 = genRandomUUID(); final String commit2 = "0000002"; baseFilePath = testTable.forCommit(commit2) .withInserts(partition1, fileId2, Collections.singletonList(record2)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.clear(); partitionToFilesNameLengthMap.computeIfAbsent(partition1, k -> new ArrayList<>()).add(Pair.of(fileId2, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit2, WriteOperationType.UPSERT, Collections.singletonList(partition1), partitionToFilesNameLengthMap, false, false); final String fileId3 = genRandomUUID(); final String commit3 = "0000003"; baseFilePath = testTable.forCommit(commit3) .withInserts(partition2, fileId3, Collections.singletonList(record4)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.clear(); partitionToFilesNameLengthMap.computeIfAbsent(partition2, k -> new ArrayList<>()).add(Pair.of(fileId3, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit3, WriteOperationType.UPSERT, Collections.singletonList(partition2), partitionToFilesNameLengthMap, false, false); // We do the tag again metaClient = HoodieTableMetaClient.reload(metaClient); taggedRecordRDD = tagLocation(bloomIndex, recordRDD, HoodieSparkTable.create(config, context, metaClient)); // Check results for (HoodieRecord record : 
taggedRecordRDD.collect()) { if (record.getRecordKey().equals(rowKey1)) { if (record.getPartitionPath().equals(partition2)) { assertEquals(record.getCurrentLocation().getFileId(), fileId3); } else { assertEquals(record.getCurrentLocation().getFileId(), fileId1); } } else if (record.getRecordKey().equals(rowKey2)) { assertEquals(record.getCurrentLocation().getFileId(), fileId2); } else if (record.getRecordKey().equals(rowKey3)) { assertFalse(record.isCurrentLocationKnown()); } } } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testTagLocationOnNonpartitionedTable( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) throws Exception { // We have some records to be tagged (two different partitions) String rowKey1 = genRandomUUID(); String rowKey2 = genRandomUUID(); String rowKey3 = genRandomUUID(); String emptyPartitionPath = ""; HoodieRecord record1 = createSimpleRecord(rowKey1, "2016-01-31T03:16:41.415Z", 12, Option.of("")); HoodieRecord record2 = createSimpleRecord(rowKey2, "2016-01-31T03:20:41.415Z", 100, Option.of("")); HoodieRecord record3 = createSimpleRecord(rowKey3, "2016-01-31T03:16:41.415Z", 15, Option.of("")); JavaRDD<HoodieRecord> recordRDD = jsc.parallelize(Arrays.asList(record1, record2, record3)); // Also create the metadata and config HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); HoodieSparkTable hoodieTable = HoodieSparkTable.create(config, context, metaClient); metadataWriter = SparkHoodieBackedTableMetadataWriter.create(storageConf, config, context); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, metadataWriter, Option.of(context)); // Let's tag HoodieBloomIndex bloomIndex = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); JavaRDD<HoodieRecord> taggedRecordRDD = 
tagLocation(bloomIndex, recordRDD, hoodieTable); // Should not find any files for (HoodieRecord record : taggedRecordRDD.collect()) { assertFalse(record.isCurrentLocationKnown()); } final Map<String, List<Pair<String, Integer>>> partitionToFilesNameLengthMap = new HashMap<>(); // We create three parquet file, each having one record final String fileId1 = genRandomUUID(); final String commit1 = "0000001"; StoragePath baseFilePath = testTable.forCommit(commit1) .withInserts(emptyPartitionPath, fileId1, Collections.singletonList(record1)); long baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(emptyPartitionPath, k -> new ArrayList<>()).add(Pair.of(fileId1, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit1, WriteOperationType.UPSERT, Collections.singletonList(emptyPartitionPath), partitionToFilesNameLengthMap, false, false); final String fileId2 = genRandomUUID(); final String commit2 = "0000002"; baseFilePath = testTable.forCommit(commit2) .withInserts(emptyPartitionPath, fileId2, Collections.singletonList(record2)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.clear(); partitionToFilesNameLengthMap.computeIfAbsent(emptyPartitionPath, k -> new ArrayList<>()).add(Pair.of(fileId2, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit2, WriteOperationType.UPSERT, Collections.singletonList(emptyPartitionPath), partitionToFilesNameLengthMap, false, false); final String fileId3 = UUID.randomUUID().toString(); final String commit3 = "0000003"; baseFilePath = testTable.forCommit(commit3) .withInserts(emptyPartitionPath, fileId3, Collections.singletonList(record3)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.clear(); partitionToFilesNameLengthMap.computeIfAbsent(emptyPartitionPath, k -> new ArrayList<>()).add(Pair.of(fileId3, Integer.valueOf((int) baseFileLength))); 
testTable.doWriteOperation(commit3, WriteOperationType.UPSERT, Collections.singletonList(emptyPartitionPath), partitionToFilesNameLengthMap, false, false); // We do the tag again metaClient = HoodieTableMetaClient.reload(metaClient); taggedRecordRDD = tagLocation(bloomIndex, recordRDD, HoodieSparkTable.create(config, context, metaClient)); // Check results for (HoodieRecord record : taggedRecordRDD.collect()) { if (record.getRecordKey().equals(rowKey1)) { assertEquals(record.getCurrentLocation().getFileId(), fileId1); } else if (record.getRecordKey().equals(rowKey2)) { assertEquals(record.getCurrentLocation().getFileId(), fileId2); } else if (record.getRecordKey().equals(rowKey3)) { assertEquals(record.getCurrentLocation().getFileId(), fileId3); } } } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testCheckExists( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) throws Exception { // We have some records to be tagged (two different partitions) HoodieRecord record1 = createSimpleRecord("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record2 = createSimpleRecord("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0", "2016-01-31T03:20:41.415Z", 100); HoodieRecord record3 = createSimpleRecord("3eb5b87c-1fej-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 15); // record key same as recordStr2 HoodieRecord record4 = createSimpleRecord("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0", "2015-01-31T03:16:41.415Z", 32); JavaRDD<HoodieKey> keysRDD = jsc.parallelize(Arrays.asList(record1.getKey(), record2.getKey(), record3.getKey(), record4.getKey())); // Also create the metadata and config HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); HoodieTable hoodieTable = HoodieSparkTable.create(config, context, metaClient); metadataWriter = 
SparkHoodieBackedTableMetadataWriter.create(storageConf, config, context); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, metadataWriter, Option.of(context)); // Let's tag HoodieBloomIndex bloomIndex = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); JavaRDD<HoodieRecord> taggedRecords = tagLocation( bloomIndex, keysRDD.map(k -> new HoodieEmptyRecord<>(k, HoodieRecord.HoodieRecordType.AVRO)), hoodieTable); JavaPairRDD<HoodieKey, Option<Pair<String, String>>> recordLocationsRDD = taggedRecords .mapToPair(hr -> new Tuple2<>(hr.getKey(), hr.isCurrentLocationKnown() ? Option.of(Pair.of(hr.getPartitionPath(), hr.getCurrentLocation().getFileId())) : Option.empty()) ); // Should not find any files for (Tuple2<HoodieKey, Option<Pair<String, String>>> record : recordLocationsRDD.collect()) { assertTrue(!record._2.isPresent()); } final String partition1 = "2016/01/31"; final String partition2 = "2015/01/31"; final String fileId1 = genRandomUUID(); final String fileId2 = genRandomUUID(); final String fileId3 = genRandomUUID(); final Map<String, List<Pair<String, Integer>>> partitionToFilesNameLengthMap = new HashMap<>(); // We create three parquet file, each having one record. 
(two different partitions) final String commit1 = "0000001"; StoragePath baseFilePath = testTable.forCommit(commit1) .withInserts(partition1, fileId1, Collections.singletonList(record1)); long baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partition1, k -> new ArrayList<>()).add(Pair.of(fileId1, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit1, WriteOperationType.UPSERT, Collections.singletonList(partition1), partitionToFilesNameLengthMap, false, false); final String commit2 = "0000002"; partitionToFilesNameLengthMap.clear(); baseFilePath = testTable.forCommit(commit2) .withInserts(partition1, fileId2, Collections.singletonList(record2)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partition1, k -> new ArrayList<>()).add(Pair.of(fileId2, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit2, WriteOperationType.UPSERT, Collections.singletonList(partition1), partitionToFilesNameLengthMap, false, false); final String commit3 = "0000003"; partitionToFilesNameLengthMap.clear(); baseFilePath = testTable.forCommit(commit3) .withInserts(partition2, fileId3, Collections.singletonList(record4)); baseFileLength = storage.getPathInfo(baseFilePath).getLength(); partitionToFilesNameLengthMap.computeIfAbsent(partition2, k -> new ArrayList<>()).add(Pair.of(fileId3, Integer.valueOf((int) baseFileLength))); testTable.doWriteOperation(commit3, WriteOperationType.UPSERT, Collections.singletonList(partition2), partitionToFilesNameLengthMap, false, false); // We do the tag again metaClient = HoodieTableMetaClient.reload(metaClient); hoodieTable = HoodieSparkTable.create(config, context, metaClient); taggedRecords = tagLocation(bloomIndex, keysRDD.map(k -> new HoodieAvroRecord(k, null)), hoodieTable); recordLocationsRDD = taggedRecords .mapToPair(hr -> new Tuple2<>(hr.getKey(), hr.isCurrentLocationKnown() ? 
Option.of(Pair.of(hr.getPartitionPath(), hr.getCurrentLocation().getFileId())) : Option.empty()) ); // Check results for (Tuple2<HoodieKey, Option<Pair<String, String>>> record : recordLocationsRDD.collect()) { if (record._1.getRecordKey().equals("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0")) { assertTrue(record._2.isPresent()); assertEquals(fileId1, record._2.get().getRight()); } else if (record._1.getRecordKey().equals("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0")) { assertTrue(record._2.isPresent()); if (record._1.getPartitionPath().equals(partition2)) { assertEquals(fileId3, record._2.get().getRight()); } else { assertEquals(fileId2, record._2.get().getRight()); } } else if (record._1.getRecordKey().equals("3eb5b87c-1fej-4edd-87b4-6ec96dc405a0")) { assertFalse(record._2.isPresent()); } } } @ParameterizedTest(name = TEST_NAME_WITH_PARAMS) @MethodSource("configParams") public void testBloomFilterFalseError( boolean rangePruning, boolean treeFiltering, boolean bucketizedChecking, boolean useMetadataTable, boolean enableFileGroupIdKeySorting) throws Exception { // We have two hoodie records // We write record1 to a parquet file, using a bloom filter having both records HoodieRecord record1 = createSimpleRecord("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0", "2016-01-31T03:16:41.415Z", 12); HoodieRecord record2 = createSimpleRecord("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0", "2016-01-31T03:20:41.415Z", 100); BloomFilter filter = BloomFilterFactory.createBloomFilter(10000, 0.0000001, -1, BloomFilterTypeCode.SIMPLE.name()); filter.add(record2.getRecordKey()); HoodieSparkWriteableTestTable testTable = HoodieSparkWriteableTestTable.of(metaClient, SCHEMA, filter, Option.of(context)); String fileId = testTable.addCommit("000").getFileIdWithInserts("2016/01/31", record1); assertTrue(filter.mightContain(record1.getRecordKey())); assertTrue(filter.mightContain(record2.getRecordKey())); // We do the tag JavaRDD<HoodieRecord> recordRDD = jsc.parallelize(Arrays.asList(record1, record2)); 
HoodieWriteConfig config = makeConfig(rangePruning, treeFiltering, bucketizedChecking, useMetadataTable, enableFileGroupIdKeySorting); metaClient = HoodieTableMetaClient.reload(metaClient); HoodieTable table = HoodieSparkTable.create(config, context, metaClient); HoodieBloomIndex bloomIndex = new HoodieBloomIndex(config, SparkHoodieBloomIndexHelper.getInstance()); JavaRDD<HoodieRecord> taggedRecordRDD = tagLocation(bloomIndex, recordRDD, table); // Check results for (HoodieRecord record : taggedRecordRDD.collect()) { if (record.getKey().equals("1eb5b87a-1feh-4edd-87b4-6ec96dc405a0")) { assertEquals(record.getCurrentLocation().getFileId(), fileId); } else if (record.getRecordKey().equals("2eb5b87b-1feu-4edd-87b4-6ec96dc405a0")) { assertFalse(record.isCurrentLocationKnown()); } } } private static String genRandomUUID() { return genPseudoRandomUUID(RANDOM).toString(); } }
apache/impala
35,512
fe/src/main/java/org/apache/impala/catalog/Catalog.java
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

package org.apache.impala.catalog;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import javax.annotation.Nullable;

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.DataOperationType;
import org.apache.hadoop.hive.metastore.api.LockComponent;
import org.apache.hadoop.hive.metastore.api.LockLevel;
import org.apache.hadoop.hive.metastore.api.LockType;
import org.apache.impala.analysis.FunctionName;
import org.apache.impala.authorization.AuthorizationPolicy;
import org.apache.impala.catalog.MetaStoreClientPool.MetaStoreClient;
import org.apache.impala.catalog.monitor.CatalogMonitor;
import org.apache.impala.common.TransactionException;
import org.apache.impala.common.TransactionKeepalive;
import org.apache.impala.common.TransactionKeepalive.HeartbeatContext;
import org.apache.impala.compat.MetastoreShim;
import org.apache.impala.thrift.TCatalogObject;
import org.apache.impala.thrift.TFunction;
import org.apache.impala.thrift.THdfsPartition;
import org.apache.impala.thrift.TImpalaTableType;
import org.apache.impala.thrift.TPartitionKeyValue;
import org.apache.impala.thrift.TPrincipalType;
import org.apache.impala.thrift.TTable;
import org.apache.impala.thrift.TTableName;
import org.apache.impala.thrift.TUniqueId;
import org.apache.impala.util.EventSequence;
import org.apache.impala.util.PatternMatcher;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;

/**
 * Thread safe interface for reading and updating metadata stored in the Hive MetaStore.
 * This class provides a storage API for caching CatalogObjects: databases, tables,
 * and functions and the relevant metadata to go along with them. Although this class is
 * thread safe, it does not guarantee consistency with the MetaStore. It is important
 * to keep in mind that there may be external (potentially conflicting) concurrent
 * metastore updates occurring at any time.
 * The CatalogObject storage hierarchy is:
 * Catalog -> Db -> Table
 *               -> Function
 * Each level has its own synchronization, so the cache of Dbs is synchronized and each
 * Db has a cache of tables which is synchronized independently.
 *
 * The catalog is populated with the impala builtins on startup. Builtins and user
 * functions are treated identically by the catalog. The builtins go in a specific
 * database that the user cannot modify.
 * Builtins are populated on startup in initBuiltins().
 */
public abstract class Catalog implements AutoCloseable {
  // Initial catalog version and ID.
  public final static long INITIAL_CATALOG_VERSION = 0L;
  public static final TUniqueId INITIAL_CATALOG_SERVICE_ID = new TUniqueId(0L, 0L);
  public static final String DEFAULT_DB = "default";

  private MetaStoreClientPool metaStoreClientPool_;

  // Cache of authorization policy metadata. Populated from data retrieved from the
  // Sentry Service, if configured.
  protected AuthorizationPolicy authPolicy_ = new AuthorizationPolicy();

  // Thread safe cache of database metadata.
  protected CatalogObjectCache<Db> dbCache_ = new CatalogObjectCache<>();

  // Cache of data sources.
  protected final CatalogObjectCache<DataSource> dataSources_;

  // Cache of transaction to write id mapping for the open transactions which are detected
  // by the catalogd via events processor. The entries get cleaned up on receiving commit
  // transaction or abort transaction events.
  // We need this mapping because not all the write ids for a commit event can be
  // retrieved from HMS. We can fetch write id for write events via getAllWriteEventInfo.
  // However, we don't have API to fetch write ids for DDL events.
  // The value sets are concurrent (see addWriteId()) so that concurrent add/read/remove
  // of write ids for the same transaction is safe without external locking.
  protected final ConcurrentHashMap<Long, Set<TableWriteId>> txnToWriteIds_ =
      new ConcurrentHashMap<>();

  // Cache of known HDFS cache pools. Allows for checking the existence
  // of pools without hitting HDFS.
  protected final CatalogObjectCache<HdfsCachePool> hdfsCachePools_ =
      new CatalogObjectCache<HdfsCachePool>(false);

  // Cache of authorization cache invalidation markers.
  protected final CatalogObjectCache<AuthzCacheInvalidation> authzCacheInvalidation_ =
      new CatalogObjectCache<>();

  // This member is responsible for heartbeating HMS locks and transactions.
  // Null when the HMS major version is <= 2 (no ACID transaction support needed).
  private TransactionKeepalive transactionKeepalive_;

  /**
   * Creates a new instance of Catalog backed by a given MetaStoreClientPool.
   */
  public Catalog(MetaStoreClientPool metaStoreClientPool) {
    dataSources_ = new CatalogObjectCache<DataSource>();
    metaStoreClientPool_ = Preconditions.checkNotNull(metaStoreClientPool);
    if (MetastoreShim.getMajorVersion() > 2) {
      transactionKeepalive_ = new TransactionKeepalive(metaStoreClientPool_);
    } else {
      transactionKeepalive_ = null;
    }
  }

  /**
   * Creates a Catalog instance with the default MetaStoreClientPool implementation.
   * Refer to MetaStoreClientPool class for more details.
   */
  public Catalog() {
    this(new MetaStoreClientPool(0, 0));
  }

  /**
   * Returns the Hive ACID user id used by this catalog.
   */
  public abstract String getAcidUserId();

  /**
   * Adds a new database to the catalog, replacing any existing database with the same
   * name.
   */
  public void addDb(Db db) {
    dbCache_.add(db);
  }

  /**
   * Gets the Db object from the Catalog using a case-insensitive lookup on the name.
   * Returns null if no matching database is found.
   */
  public Db getDb(String dbName) {
    Preconditions.checkArgument(dbName != null && !dbName.isEmpty(),
        "Null or empty database name given as argument to Catalog.getDb");
    return dbCache_.get(dbName.toLowerCase());
  }

  /**
   * Removes a database from the metadata cache. Returns the value removed or null
   * if not database was removed as part of this operation. Used by DROP DATABASE
   * statements.
   */
  public @Nullable Db removeDb(String dbName) {
    Db removedDb = dbCache_.remove(dbName.toLowerCase());
    if (removedDb != null) {
      removedDb.markRemoved();
    }
    return removedDb;
  }

  /**
   * Returns all databases that match 'matcher'.
   */
  public List<Db> getDbs(PatternMatcher matcher) {
    return filterCatalogObjectsByPattern(dbCache_.getValues(), matcher);
  }

  /**
   * Returns the Table object for the given dbName/tableName or null if the database or
   * table does not exist.
   */
  public @Nullable Table getTableNoThrow(String dbName, String tableName) {
    Db db = getDb(dbName);
    if (db == null) return null;
    return db.getTable(tableName);
  }

  /**
   * Returns the Table object for the given dbName/tableName. Throws if the database
   * does not exists. Returns null if the table does not exist.
   * TODO: Clean up the inconsistent error behavior (throwing vs. returning null).
   */
  public Table getTable(String dbName, String tableName)
      throws DatabaseNotFoundException {
    Db db = getDb(dbName);
    if (db == null) {
      throw new DatabaseNotFoundException("Database '" + dbName + "' not found");
    }
    return db.getTable(tableName);
  }

  /**
   * Returns the table with the given name if it's completely loaded in the cache.
   * Otherwise, return an IncompleteTable for it.
   */
  public Table getTableIfCachedNoThrow(String dbName, String tableName) {
    // In legacy catalog implementation, this is the same behavior as getTableNoThrow.
    return getTableNoThrow(dbName, tableName);
  }

  /**
   * The same as getTableIfCachedNoThrow except that we'll throw an exception if database
   * not exists.
   */
  public Table getTableIfCached(String dbName, String tableName)
      throws DatabaseNotFoundException {
    return getTable(dbName, tableName);
  }

  /**
   * Removes a table from the catalog and returns the table that was removed, or null
   * if the table/database does not exist.
   */
  public @Nullable Table removeTable(TTableName tableName) {
    // Remove the old table name from the cache and add the new table.
    Db db = getDb(tableName.getDb_name());
    if (db == null) return null;
    Table tbl = db.removeTable(tableName.getTable_name());
    if (tbl != null && !tbl.isStoredInImpaladCatalogCache()) {
      CatalogMonitor.INSTANCE.getCatalogTableMetrics().removeTable(tbl);
    }
    return tbl;
  }

  /**
   * Returns all tables and views in 'dbName' that match 'matcher'.
   */
  public List<String> getTableNames(String dbName, PatternMatcher matcher)
      throws DatabaseNotFoundException {
    return getTableNames(dbName, matcher, /*tableTypes*/ Collections.emptySet());
  }

  /**
   * Returns all tables of types specified in 'tableTypes' under the database 'dbName'
   * that match 'matcher'.
   *
   * dbName must not be null.
   *
   * Table names are returned unqualified.
   */
  public List<String> getTableNames(String dbName, PatternMatcher matcher,
      Set<TImpalaTableType> tableTypes) throws DatabaseNotFoundException {
    Preconditions.checkNotNull(dbName);
    Db db = getDb(dbName);
    if (db == null) {
      throw new DatabaseNotFoundException("Database '" + dbName + "' not found");
    }
    return filterStringsByPattern(db.getAllTableNames(tableTypes), matcher);
  }

  /**
   * Returns true if the table and the database exist in the Impala Catalog. Returns
   * false if either the table or the database do not exist.
   */
  public boolean containsTable(String dbName, String tableName) {
    Db db = getDb(dbName);
    return (db == null) ? false : db.containsTable(tableName);
  }

  /**
   * Adds a data source to the in-memory map of data sources.
   * @return true if this item was added or false if the existing value was preserved.
   */
  public boolean addDataSource(DataSource dataSource) {
    return dataSources_.add(dataSource);
  }

  /**
   * Removes a data source from the in-memory map of data sources.
   * @return the item that was removed if it existed in the cache, null otherwise.
   */
  public DataSource removeDataSource(String dataSourceName) {
    Preconditions.checkNotNull(dataSourceName);
    return dataSources_.remove(dataSourceName.toLowerCase());
  }

  /**
   * Gets the specified data source.
   */
  public DataSource getDataSource(String dataSourceName) {
    Preconditions.checkNotNull(dataSourceName);
    return dataSources_.get(dataSourceName.toLowerCase());
  }

  /**
   * Gets a list of all data sources.
   */
  public List<DataSource> getDataSources() {
    return dataSources_.getValues();
  }

  /**
   * Returns a list of data sources names that match pattern.
   *
   * @see PatternMatcher#matches(String) for details of the pattern match semantics.
   *
   * pattern may be null (and thus matches everything).
   */
  public List<String> getDataSourceNames(String pattern) {
    return filterStringsByPattern(dataSources_.keySet(),
        PatternMatcher.createHivePatternMatcher(pattern));
  }

  /**
   * Returns all DataSources that match 'matcher'.
   */
  public List<DataSource> getDataSources(PatternMatcher matcher) {
    return filterCatalogObjectsByPattern(dataSources_.getValues(), matcher);
  }

  /**
   * Adds a function to the catalog.
   * Returns true if the function was successfully added.
   * Returns false if the function already exists.
   * TODO: allow adding a function to a global scope. We probably want this to resolve
   * after the local scope.
   * e.g. if we had fn() and db.fn(). If the current database is 'db', fn() would
   * resolve first to db.fn().
   */
  public boolean addFunction(Function fn) {
    Db db = getDb(fn.dbName());
    if (db == null) return false;
    return db.addFunction(fn);
  }

  /**
   * Returns the function that best matches 'desc' that is registered with the
   * catalog using 'mode' to check for matching.
   * If desc matches multiple functions in the catalog, it will return the function with
   * the strictest matching mode.
   * If multiple functions match at the same matching mode, best match is defined as the
   * one that requires the least number of arguments to be converted.
   * Ties are broken by comparing argument types in lexical order. Argument types are
   * ordered by argument precision (e.g. double is preferred over float) and then by
   * alphabetical order of argument type name, to guarantee deterministic results.
   */
  public @Nullable Function getFunction(Function desc, Function.CompareMode mode) {
    Db db = getDb(desc.dbName());
    if (db == null) return null;
    return db.getFunction(desc, mode);
  }

  /**
   * Removes a function from the catalog. Increments the catalog version and returns
   * the Function object that was removed if the function existed, otherwise returns
   * null.
   */
  public @Nullable Function removeFunction(Function desc) {
    Db db = getDb(desc.dbName());
    if (db == null) return null;
    return db.removeFunction(desc);
  }

  /**
   * Returns true if there is a function with this function name. Parameters
   * are ignored.
   */
  public boolean containsFunction(FunctionName name) {
    Db db = getDb(name.getDb());
    if (db == null) return false;
    return db.containsFunction(name.getFunction());
  }

  /**
   * Adds a new HdfsCachePool to the catalog.
   */
  public boolean addHdfsCachePool(HdfsCachePool cachePool) {
    return hdfsCachePools_.add(cachePool);
  }

  /**
   * Gets a HdfsCachePool given a cache pool name. Returns null if the cache
   * pool does not exist.
   */
  public HdfsCachePool getHdfsCachePool(String poolName) {
    return hdfsCachePools_.get(poolName);
  }

  /**
   * Gets the {@link AuthzCacheInvalidation} for a given marker name.
   */
  public AuthzCacheInvalidation getAuthzCacheInvalidation(String markerName) {
    return authzCacheInvalidation_.get(Preconditions.checkNotNull(markerName));
  }

  /**
   * Release the Hive Meta Store Client resources. Can be called multiple times
   * (additional calls will be no-ops).
   */
  @Override
  public void close() { metaStoreClientPool_.close(); }

  @VisibleForTesting
  public MetaStoreClientPool getMetaStoreClientPool() {
    return metaStoreClientPool_;
  }

  @VisibleForTesting
  public void setMetaStoreClientPool(MetaStoreClientPool pool) {
    metaStoreClientPool_ = pool;
  }

  /**
   * Returns a managed meta store client from the client connection pool.
   */
  public MetaStoreClient getMetaStoreClient() { return metaStoreClientPool_.getClient(); }

  /**
   * Same as the above but also update the given 'timeline'.
   */
  public MetaStoreClient getMetaStoreClient(EventSequence timeline) {
    MetaStoreClient client = getMetaStoreClient();
    timeline.markEvent("Got Metastore client");
    return client;
  }

  public int getNumHmsClientsIdle() { return metaStoreClientPool_.getNumHmsClientsIdle(); }

  public int getNumHmsClientsInUse() {
    return metaStoreClientPool_.getNumHmsClientsInUse();
  }

  /**
   * Return all members of 'candidates' that match 'matcher'.
   * The results are sorted in String.CASE_INSENSITIVE_ORDER.
   * matcher must not be null.
   */
  public static List<String> filterStringsByPattern(Iterable<String> candidates,
      PatternMatcher matcher) {
    Preconditions.checkNotNull(matcher);
    List<String> filtered = new ArrayList<>();
    for (String candidate: candidates) {
      if (matcher.matches(candidate)) filtered.add(candidate);
    }
    Collections.sort(filtered, String.CASE_INSENSITIVE_ORDER);
    return filtered;
  }

  private static class CatalogObjectOrder implements Comparator<HasName> {
    @Override
    public int compare(HasName o1, HasName o2) {
      return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
    }
  }

  private static final CatalogObjectOrder CATALOG_OBJECT_ORDER = new CatalogObjectOrder();

  /**
   * Return all members of 'candidates' that match 'matcher'.
   * The results are sorted in CATALOG_OBJECT_ORDER.
   * matcher must not be null.
   */
  public static <T extends HasName> List<T> filterCatalogObjectsByPattern(
      Iterable<? extends T> candidates, PatternMatcher matcher) {
    Preconditions.checkNotNull(matcher);
    List<T> filtered = new ArrayList<>();
    for (T candidate: candidates) {
      if (matcher.matches(candidate.getName())) filtered.add(candidate);
    }
    Collections.sort(filtered, CATALOG_OBJECT_ORDER);
    return filtered;
  }

  /**
   * Resolves an HMS Partition object to the corresponding HdfsPartition by translating
   * the partition's values into a TPartitionKeyValue spec. Assumes the leading columns
   * of the table are the partitioning columns, matching msPart's value order.
   */
  public HdfsPartition getHdfsPartition(String dbName, String tableName,
      org.apache.hadoop.hive.metastore.api.Partition msPart) throws CatalogException {
    List<TPartitionKeyValue> partitionSpec = new ArrayList<>();
    Table table = getTable(dbName, tableName);
    if (!(table instanceof HdfsTable)) {
      throw new PartitionNotFoundException(
          "Not an HdfsTable: " + dbName + "." + tableName);
    }
    for (int i = 0; i < msPart.getValues().size(); ++i) {
      partitionSpec.add(new TPartitionKeyValue(
          ((HdfsTable)table).getColumns().get(i).getName(),
          msPart.getValues().get(i)));
    }
    return getHdfsPartition(table.getDb().getName(), table.getName(), partitionSpec);
  }

  /**
   * Returns the HdfsPartition object for the given dbName/tableName and partition spec.
   * This will trigger a metadata load if the table metadata is not yet cached.
   * @throws DatabaseNotFoundException - If the database does not exist.
   * @throws TableNotFoundException - If the table does not exist.
   * @throws PartitionNotFoundException - If the partition does not exist.
   * @throws TableLoadingException - If there is an error loading the table metadata.
   */
  public HdfsPartition getHdfsPartition(String dbName, String tableName,
      List<TPartitionKeyValue> partitionSpec) throws CatalogException {
    String partitionNotFoundMsg = "Partition not found: " +
        Joiner.on(", ").join(partitionSpec);
    Table table = getTable(dbName, tableName);
    // This is not an Hdfs table, throw an error.
    if (!(table instanceof HdfsTable)) {
      throw new PartitionNotFoundException(partitionNotFoundMsg);
    }
    // Get the HdfsPartition object for the given partition spec.
    HdfsPartition partition =
        ((HdfsTable) table).getPartitionFromThriftPartitionSpec(partitionSpec);
    if (partition == null) throw new PartitionNotFoundException(partitionNotFoundMsg);
    return partition;
  }

  /**
   * Returns true if the table contains the given partition spec, otherwise false.
   * This may trigger a metadata load if the table metadata is not yet cached.
   * @throws DatabaseNotFoundException - If the database does not exist.
   * @throws TableNotFoundException - If the table does not exist.
   * @throws TableLoadingException - If there is an error loading the table metadata.
   */
  public boolean containsHdfsPartition(String dbName, String tableName,
      List<TPartitionKeyValue> partitionSpec) throws CatalogException {
    try {
      return getHdfsPartition(dbName, tableName, partitionSpec) != null;
    } catch (PartitionNotFoundException e) {
      return false;
    }
  }

  /**
   * Gets the thrift representation of a catalog object, given the "object
   * description". The object description is just a TCatalogObject with only the
   * catalog object type and object name set.
   * If the object is not found, a CatalogException is thrown.
   */
  public TCatalogObject getTCatalogObject(TCatalogObject objectDesc)
      throws CatalogException {
    return getTCatalogObject(objectDesc, false);
  }

  /**
   * Gets the thrift representation of a catalog object, given the "object
   * description". The object description is just a TCatalogObject with only the
   * catalog object type and object name set.
   * If the object is not found, a CatalogException is thrown.
   * @param objectDesc the object description.
   * @param isHumanReadable whether the request is for human readable purpose or not.
   *                        If False, return full object. Otherwise, return smaller
   *                        catalog object that omit some field.
   * @return the requested catalog object.
   * @throws CatalogException
   */
  public TCatalogObject getTCatalogObject(
      TCatalogObject objectDesc, boolean isHumanReadable) throws CatalogException {
    TCatalogObject result = new TCatalogObject();
    switch (objectDesc.getType()) {
      case DATABASE: {
        Db db = getDb(objectDesc.getDb().getDb_name());
        if (db == null) {
          throw new CatalogException(
              "Database not found: " + objectDesc.getDb().getDb_name());
        }
        result.setType(db.getCatalogObjectType());
        result.setCatalog_version(db.getCatalogVersion());
        result.setLast_modified_time_ms(db.getLastLoadedTimeMs());
        result.setDb(db.toThrift());
        break;
      }
      case TABLE:
      case VIEW: {
        Table table = getTable(objectDesc.getTable().getDb_name(),
            objectDesc.getTable().getTbl_name());
        if (table == null) {
          throw new CatalogException("Table not found: " +
              objectDesc.getTable().getTbl_name());
        }
        // Hold the table's read lock while serializing so the thrift snapshot is
        // consistent.
        table.takeReadLock();
        try {
          result.setType(table.getCatalogObjectType());
          result.setCatalog_version(table.getCatalogVersion());
          result.setLast_modified_time_ms(table.getLastLoadedTimeMs());
          result.setTable(
              isHumanReadable ? table.toHumanReadableThrift() : table.toThrift());
        } finally {
          table.releaseReadLock();
        }
        break;
      }
      // TODO(IMPALA-9935): support HDFS_PARTITION
      case FUNCTION: {
        TFunction tfn = objectDesc.getFn();
        Function desc = Function.fromThrift(tfn);
        Function fn = getFunction(desc, Function.CompareMode.IS_INDISTINGUISHABLE);
        if (fn == null) {
          throw new CatalogException("Function not found: " + tfn);
        }
        result.setType(fn.getCatalogObjectType());
        result.setCatalog_version(fn.getCatalogVersion());
        result.setLast_modified_time_ms(fn.getLastLoadedTimeMs());
        result.setFn(fn.toThrift());
        break;
      }
      case DATA_SOURCE: {
        String dataSrcName = objectDesc.getData_source().getName();
        DataSource dataSrc = getDataSource(dataSrcName);
        if (dataSrc == null) {
          throw new CatalogException("Data source not found: " + dataSrcName);
        }
        result.setType(dataSrc.getCatalogObjectType());
        result.setCatalog_version(dataSrc.getCatalogVersion());
        result.setLast_modified_time_ms(dataSrc.getLastLoadedTimeMs());
        result.setData_source(dataSrc.toThrift());
        break;
      }
      case HDFS_CACHE_POOL: {
        HdfsCachePool pool = getHdfsCachePool(objectDesc.getCache_pool().getPool_name());
        if (pool == null) {
          throw new CatalogException(
              "Hdfs cache pool not found: " + objectDesc.getCache_pool().getPool_name());
        }
        result.setType(pool.getCatalogObjectType());
        result.setCatalog_version(pool.getCatalogVersion());
        result.setLast_modified_time_ms(pool.getLastLoadedTimeMs());
        result.setCache_pool(pool.toThrift());
        break;
      }
      case PRINCIPAL:
        Principal principal = authPolicy_.getPrincipal(
            objectDesc.getPrincipal().getPrincipal_name(),
            objectDesc.getPrincipal().getPrincipal_type());
        if (principal == null) {
          throw new CatalogException("Principal not found: " +
              objectDesc.getPrincipal().getPrincipal_name());
        }
        result.setType(principal.getCatalogObjectType());
        result.setCatalog_version(principal.getCatalogVersion());
        result.setPrincipal(principal.toThrift());
        break;
      case PRIVILEGE:
        Principal tmpPrincipal = authPolicy_.getPrincipal(
            objectDesc.getPrivilege().getPrincipal_id(),
            objectDesc.getPrivilege().getPrincipal_type());
        if (tmpPrincipal == null) {
          throw new CatalogException(String.format("No %s associated with ID: %d",
              Principal.toString(objectDesc.getPrivilege().getPrincipal_type())
                  .toLowerCase(), objectDesc.getPrivilege().getPrincipal_id()));
        }
        String privilegeName = PrincipalPrivilege.buildPrivilegeName(
            objectDesc.getPrivilege());
        PrincipalPrivilege privilege = tmpPrincipal.getPrivilege(privilegeName);
        if (privilege != null) {
          result.setType(privilege.getCatalogObjectType());
          result.setCatalog_version(privilege.getCatalogVersion());
          result.setPrivilege(privilege.toThrift());
          return result;
        }
        throw new CatalogException(String.format("%s '%s' does not contain " +
            "privilege: '%s'", Principal.toString(tmpPrincipal.getPrincipalType()),
            tmpPrincipal.getName(), privilegeName));
      case AUTHZ_CACHE_INVALIDATION:
        AuthzCacheInvalidation authzCacheInvalidation = getAuthzCacheInvalidation(
            objectDesc.getAuthz_cache_invalidation().getMarker_name());
        if (authzCacheInvalidation == null) {
          // Authorization cache invalidation requires a single catalog object and it
          // needs to exist.
          throw new CatalogException("Authz cache invalidation not found: " +
              objectDesc.getAuthz_cache_invalidation().getMarker_name());
        }
        result.setType(authzCacheInvalidation.getCatalogObjectType());
        result.setCatalog_version(authzCacheInvalidation.getCatalogVersion());
        result.setAuthz_cache_invalidation(authzCacheInvalidation.toThrift());
        break;
      default:
        throw new IllegalStateException(
            "Unexpected TCatalogObject type: " + objectDesc.getType());
    }
    return result;
  }

  public static boolean isDefaultDb(String dbName) {
    return DEFAULT_DB.equals(dbName.toLowerCase());
  }

  /**
   * Returns a unique string key of a catalog object.
   *
   * This method may initially seem counter-intuitive because Catalog::getUniqueName()
   * uses this method to build a unique name instead of Catalog::getUniqueName()
   * providing the implementation on how to build a catalog object key. The reason is
   * building CatalogObject from TCatalogObject in order to call getUniqueName() can
   * be an expensive operation, especially for constructing a Table catalog object
   * from TCatalogObject.
   */
  public static String toCatalogObjectKey(TCatalogObject catalogObject) {
    Preconditions.checkNotNull(catalogObject);
    switch (catalogObject.getType()) {
      case DATABASE:
        return "DATABASE:" + catalogObject.getDb().getDb_name().toLowerCase();
      case TABLE:
        TTable tbl = catalogObject.getTable();
        return "TABLE:" + tbl.getDb_name().toLowerCase() + "."
            + tbl.getTbl_name().toLowerCase();
      case VIEW:
        TTable view = catalogObject.getTable();
        return "VIEW:" + view.getDb_name().toLowerCase() + "."
            + view.getTbl_name().toLowerCase();
      case HDFS_PARTITION:
        THdfsPartition part = catalogObject.getHdfs_partition();
        return "HDFS_PARTITION:" + part.getDb_name().toLowerCase() + "."
            + part.getTbl_name().toLowerCase() + ":"
            + Preconditions.checkNotNull(part.getPartition_name());
      case FUNCTION:
        return "FUNCTION:" + catalogObject.getFn().getName() + "("
            + catalogObject.getFn().getSignature() + ")";
      case PRINCIPAL:
        // It is important to make the principal object key unique since it is possible
        // to have the same name for both role and user.
        String principalName = catalogObject.getPrincipal().getPrincipal_name();
        if (catalogObject.getPrincipal().getPrincipal_type() == TPrincipalType.ROLE) {
          principalName = principalName.toLowerCase();
        }
        return "PRINCIPAL:" + principalName + "."
            + catalogObject.getPrincipal().getPrincipal_type().name();
      case PRIVILEGE:
        // The combination of privilege name + principal ID + principal type is
        // guaranteed to be unique.
        return "PRIVILEGE:"
            + PrincipalPrivilege.buildPrivilegeName(catalogObject.getPrivilege()) + "."
            + catalogObject.getPrivilege().getPrincipal_id() + "."
            + catalogObject.getPrivilege().getPrincipal_type();
      case HDFS_CACHE_POOL:
        return "HDFS_CACHE_POOL:"
            + catalogObject.getCache_pool().getPool_name().toLowerCase();
      case DATA_SOURCE:
        return "DATA_SOURCE:" + catalogObject.getData_source().getName().toLowerCase();
      case AUTHZ_CACHE_INVALIDATION:
        return "AUTHZ_CACHE_INVALIDATION:" + catalogObject.getAuthz_cache_invalidation()
            .getMarker_name().toLowerCase();
      case CATALOG:
        return "CATALOG_SERVICE_ID";
      default:
        throw new IllegalStateException(
            "Unsupported catalog object type: " + catalogObject.getType());
    }
  }

  public static String toCatalogObjectSummary(TCatalogObject catalogObject) {
    return String.format("%s(%d)", toCatalogObjectKey(catalogObject),
        catalogObject.catalog_version);
  }

  /**
   * Returns true if the two objects have the same object type and key (generated using
   * toCatalogObjectKey()).
   */
  public static boolean keyEquals(TCatalogObject first, TCatalogObject second) {
    return toCatalogObjectKey(first).equals(toCatalogObjectKey(second));
  }

  /**
   * Opens a transaction and returns a Transaction object that can be used in a
   * try-with-resources statement. That way transactions won't leak.
   * @param hmsClient the client towards HMS.
   * @param ctx Context for heartbeating.
   * @return an AutoCloseable transaction object.
   * @throws TransactionException
   */
  public Transaction openTransaction(IMetaStoreClient hmsClient, HeartbeatContext ctx)
      throws TransactionException {
    return new Transaction(hmsClient, transactionKeepalive_, getAcidUserId(), ctx);
  }

  /**
   * Creates an exclusive lock for a particular table and acquires it in the HMS. Starts
   * heartbeating the lock. This function is for locks that doesn't belong to a
   * transaction. The client of this function is responsible for calling
   * 'releaseTableLock()'.
   * @param dbName Name of the DB where the particular table is.
   * @param tableName Name of the table where the lock is acquired.
   * @param lockMaxWaitTime Maximum wait time on the ACID lock.
   * @throws TransactionException
   */
  public long lockTableStandalone(String dbName, String tableName, HeartbeatContext ctx,
      int lockMaxWaitTime) throws TransactionException {
    return lockTableInternal(dbName, tableName, 0L, DataOperationType.NO_TXN, ctx,
        lockMaxWaitTime);
  }

  /**
   * Creates an exclusive lock for a particular table and acquires it in the HMS.
   * This function can only be invoked in a transaction context, i.e. 'txnId'
   * cannot be 0.
   * @param dbName Name of the DB where the particular table is.
   * @param tableName Name of the table where the lock is acquired.
   * @param transaction the transaction that needs to lock the table.
   * @param lockMaxWaitTime Maximum wait time on the ACID lock.
   * @throws TransactionException
   */
  public void lockTableInTransaction(String dbName, String tableName,
      Transaction transaction, DataOperationType opType, HeartbeatContext ctx,
      int lockMaxWaitTime) throws TransactionException {
    Preconditions.checkState(transaction.getId() > 0);
    lockTableInternal(dbName, tableName, transaction.getId(), opType, ctx,
        lockMaxWaitTime);
  }

  /**
   * Creates an exclusive lock for a particular table and acquires it in the HMS. Starts
   * heartbeating the lock if it doesn't have a transaction context.
   * @param dbName Name of the DB where the particular table is.
   * @param tableName Name of the table where the lock is acquired.
   * @param txnId id of the transaction, 0 for standalone locks.
   * @param lockMaxWaitTime Maximum wait time on the ACID lock.
   * @throws TransactionException
   */
  private long lockTableInternal(String dbName, String tableName, long txnId,
      DataOperationType opType, HeartbeatContext ctx, int lockMaxWaitTime)
      throws TransactionException {
    Preconditions.checkState(txnId >= 0);
    LockComponent lockComponent = new LockComponent();
    lockComponent.setDbname(dbName);
    lockComponent.setTablename(tableName);
    lockComponent.setLevel(LockLevel.TABLE);
    lockComponent.setType(LockType.EXCLUSIVE);
    lockComponent.setOperationType(opType);
    List<LockComponent> lockComponents = Arrays.asList(lockComponent);
    long lockId = -1L;
    try (MetaStoreClient client = metaStoreClientPool_.getClient()) {
      lockId = MetastoreShim.acquireLock(client.getHiveClient(), txnId, lockComponents,
          lockMaxWaitTime);
      // Standalone locks (no enclosing txn) must be heartbeated by the catalog itself;
      // transactional locks are heartbeated via their transaction.
      if (txnId == 0L) transactionKeepalive_.addLock(lockId, ctx);
    }
    return lockId;
  }

  /**
   * Releases a lock based on its ID from HMS and stops heartbeating it.
   * @param lockId is the ID of the lock to clear.
   */
  public void releaseTableLock(long lockId) throws TransactionException {
    try (MetaStoreClient client = metaStoreClientPool_.getClient()) {
      transactionKeepalive_.deleteLock(lockId);
      MetastoreShim.releaseLock(client.getHiveClient(), lockId);
    }
  }

  /**
   * Returns write ids for an open txn from the Catalog. If there is no write id
   * associated with the txnId, it returns empty set.
   * The returned set is an unmodifiable view; iteration is safe against concurrent
   * addWriteId() calls because the backing set is concurrent.
   */
  public Set<TableWriteId> getWriteIds(Long txnId) {
    Preconditions.checkNotNull(txnId);
    return Collections.unmodifiableSet(txnToWriteIds_.getOrDefault(txnId,
        Collections.emptySet()));
  }

  /**
   * Adds a mapping from txnId to tableWriteId to the Catalog.
   */
  public void addWriteId(Long txnId, TableWriteId tableWriteId) {
    Preconditions.checkNotNull(txnId);
    Preconditions.checkNotNull(tableWriteId);
    // Use a concurrent set: the add() below happens outside the map's per-key lock, so
    // a plain HashSet could be corrupted by concurrent callers and would make the view
    // returned by getWriteIds() unsafe to iterate concurrently.
    txnToWriteIds_.computeIfAbsent(txnId, k -> ConcurrentHashMap.<TableWriteId>newKeySet())
        .add(tableWriteId);
  }

  /**
   * Removes and returns all write id records for a transaction. If there is no write id
   * associated with the txnId, it returns empty set.
   */
  public Set<TableWriteId> removeWriteIds(Long txnId) {
    Preconditions.checkNotNull(txnId);
    Set<TableWriteId> resultSet = txnToWriteIds_.remove(txnId);
    return resultSet != null ? resultSet : Collections.emptySet();
  }

  /**
   * Clears all write id records.
   */
  public void clearWriteIds() {
    txnToWriteIds_.clear();
  }
}
googleapis/google-cloud-java
35,238
java-notebooks/proto-google-cloud-notebooks-v1/src/main/java/com/google/cloud/notebooks/v1/IsInstanceUpgradeableResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/notebooks/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.notebooks.v1; /** * * * <pre> * Response for checking if a notebook instance is upgradeable. * </pre> * * Protobuf type {@code google.cloud.notebooks.v1.IsInstanceUpgradeableResponse} */ public final class IsInstanceUpgradeableResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) IsInstanceUpgradeableResponseOrBuilder { private static final long serialVersionUID = 0L; // Use IsInstanceUpgradeableResponse.newBuilder() to construct. 
private IsInstanceUpgradeableResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private IsInstanceUpgradeableResponse() { upgradeVersion_ = ""; upgradeInfo_ = ""; upgradeImage_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new IsInstanceUpgradeableResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_IsInstanceUpgradeableResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_IsInstanceUpgradeableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.class, com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.Builder.class); } public static final int UPGRADEABLE_FIELD_NUMBER = 1; private boolean upgradeable_ = false; /** * * * <pre> * If an instance is upgradeable. * </pre> * * <code>bool upgradeable = 1;</code> * * @return The upgradeable. */ @java.lang.Override public boolean getUpgradeable() { return upgradeable_; } public static final int UPGRADE_VERSION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object upgradeVersion_ = ""; /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @return The upgradeVersion. 
*/ @java.lang.Override public java.lang.String getUpgradeVersion() { java.lang.Object ref = upgradeVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeVersion_ = s; return s; } } /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @return The bytes for upgradeVersion. */ @java.lang.Override public com.google.protobuf.ByteString getUpgradeVersionBytes() { java.lang.Object ref = upgradeVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int UPGRADE_INFO_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object upgradeInfo_ = ""; /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @return The upgradeInfo. */ @java.lang.Override public java.lang.String getUpgradeInfo() { java.lang.Object ref = upgradeInfo_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeInfo_ = s; return s; } } /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @return The bytes for upgradeInfo. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUpgradeInfoBytes() { java.lang.Object ref = upgradeInfo_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeInfo_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int UPGRADE_IMAGE_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object upgradeImage_ = ""; /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @return The upgradeImage. */ @java.lang.Override public java.lang.String getUpgradeImage() { java.lang.Object ref = upgradeImage_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeImage_ = s; return s; } } /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @return The bytes for upgradeImage. 
*/ @java.lang.Override public com.google.protobuf.ByteString getUpgradeImageBytes() { java.lang.Object ref = upgradeImage_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeImage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (upgradeable_ != false) { output.writeBool(1, upgradeable_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, upgradeVersion_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeInfo_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, upgradeInfo_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeImage_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, upgradeImage_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (upgradeable_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(1, upgradeable_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, upgradeVersion_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeInfo_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, upgradeInfo_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(upgradeImage_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, upgradeImage_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse)) { return super.equals(obj); } com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse other = (com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) obj; if (getUpgradeable() != other.getUpgradeable()) return false; if (!getUpgradeVersion().equals(other.getUpgradeVersion())) return false; if (!getUpgradeInfo().equals(other.getUpgradeInfo())) return false; if (!getUpgradeImage().equals(other.getUpgradeImage())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + UPGRADEABLE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getUpgradeable()); hash = (37 * hash) + UPGRADE_VERSION_FIELD_NUMBER; hash = (53 * hash) + getUpgradeVersion().hashCode(); hash = (37 * hash) + UPGRADE_INFO_FIELD_NUMBER; hash = (53 * hash) + getUpgradeInfo().hashCode(); hash = (37 * hash) + UPGRADE_IMAGE_FIELD_NUMBER; hash = (53 * hash) + getUpgradeImage().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for checking if a notebook instance is upgradeable. 
* </pre> * * Protobuf type {@code google.cloud.notebooks.v1.IsInstanceUpgradeableResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_IsInstanceUpgradeableResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_IsInstanceUpgradeableResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.class, com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.Builder.class); } // Construct using com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; upgradeable_ = false; upgradeVersion_ = ""; upgradeInfo_ = ""; upgradeImage_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.notebooks.v1.NotebooksProto .internal_static_google_cloud_notebooks_v1_IsInstanceUpgradeableResponse_descriptor; } @java.lang.Override public com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse getDefaultInstanceForType() { return com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse build() { 
com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse buildPartial() { com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse result = new com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.upgradeable_ = upgradeable_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.upgradeVersion_ = upgradeVersion_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.upgradeInfo_ = upgradeInfo_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.upgradeImage_ = upgradeImage_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof 
com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) { return mergeFrom((com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse other) { if (other == com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse.getDefaultInstance()) return this; if (other.getUpgradeable() != false) { setUpgradeable(other.getUpgradeable()); } if (!other.getUpgradeVersion().isEmpty()) { upgradeVersion_ = other.upgradeVersion_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getUpgradeInfo().isEmpty()) { upgradeInfo_ = other.upgradeInfo_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getUpgradeImage().isEmpty()) { upgradeImage_ = other.upgradeImage_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { upgradeable_ = input.readBool(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { upgradeVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { upgradeInfo_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { upgradeImage_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private boolean upgradeable_; /** * * * <pre> * If an instance is upgradeable. * </pre> * * <code>bool upgradeable = 1;</code> * * @return The upgradeable. */ @java.lang.Override public boolean getUpgradeable() { return upgradeable_; } /** * * * <pre> * If an instance is upgradeable. * </pre> * * <code>bool upgradeable = 1;</code> * * @param value The upgradeable to set. * @return This builder for chaining. */ public Builder setUpgradeable(boolean value) { upgradeable_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * If an instance is upgradeable. * </pre> * * <code>bool upgradeable = 1;</code> * * @return This builder for chaining. */ public Builder clearUpgradeable() { bitField0_ = (bitField0_ & ~0x00000001); upgradeable_ = false; onChanged(); return this; } private java.lang.Object upgradeVersion_ = ""; /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @return The upgradeVersion. */ public java.lang.String getUpgradeVersion() { java.lang.Object ref = upgradeVersion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeVersion_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @return The bytes for upgradeVersion. 
*/ public com.google.protobuf.ByteString getUpgradeVersionBytes() { java.lang.Object ref = upgradeVersion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @param value The upgradeVersion to set. * @return This builder for chaining. */ public Builder setUpgradeVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } upgradeVersion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @return This builder for chaining. */ public Builder clearUpgradeVersion() { upgradeVersion_ = getDefaultInstance().getUpgradeVersion(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The version this instance will be upgraded to if calling the upgrade * endpoint. This field will only be populated if field upgradeable is true. * </pre> * * <code>string upgrade_version = 2;</code> * * @param value The bytes for upgradeVersion to set. * @return This builder for chaining. */ public Builder setUpgradeVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); upgradeVersion_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object upgradeInfo_ = ""; /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @return The upgradeInfo. 
*/ public java.lang.String getUpgradeInfo() { java.lang.Object ref = upgradeInfo_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeInfo_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @return The bytes for upgradeInfo. */ public com.google.protobuf.ByteString getUpgradeInfoBytes() { java.lang.Object ref = upgradeInfo_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeInfo_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @param value The upgradeInfo to set. * @return This builder for chaining. */ public Builder setUpgradeInfo(java.lang.String value) { if (value == null) { throw new NullPointerException(); } upgradeInfo_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @return This builder for chaining. */ public Builder clearUpgradeInfo() { upgradeInfo_ = getDefaultInstance().getUpgradeInfo(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Additional information about upgrade. * </pre> * * <code>string upgrade_info = 3;</code> * * @param value The bytes for upgradeInfo to set. * @return This builder for chaining. 
*/ public Builder setUpgradeInfoBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); upgradeInfo_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object upgradeImage_ = ""; /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @return The upgradeImage. */ public java.lang.String getUpgradeImage() { java.lang.Object ref = upgradeImage_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); upgradeImage_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @return The bytes for upgradeImage. */ public com.google.protobuf.ByteString getUpgradeImageBytes() { java.lang.Object ref = upgradeImage_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); upgradeImage_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @param value The upgradeImage to set. * @return This builder for chaining. 
*/ public Builder setUpgradeImage(java.lang.String value) { if (value == null) { throw new NullPointerException(); } upgradeImage_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @return This builder for chaining. */ public Builder clearUpgradeImage() { upgradeImage_ = getDefaultInstance().getUpgradeImage(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The new image self link this instance will be upgraded to if calling the * upgrade endpoint. This field will only be populated if field upgradeable * is true. * </pre> * * <code>string upgrade_image = 4;</code> * * @param value The bytes for upgradeImage to set. * @return This builder for chaining. */ public Builder setUpgradeImageBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); upgradeImage_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) } // @@protoc_insertion_point(class_scope:google.cloud.notebooks.v1.IsInstanceUpgradeableResponse) private static final com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse(); } public static com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse 
getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<IsInstanceUpgradeableResponse> PARSER = new com.google.protobuf.AbstractParser<IsInstanceUpgradeableResponse>() { @java.lang.Override public IsInstanceUpgradeableResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<IsInstanceUpgradeableResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<IsInstanceUpgradeableResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.notebooks.v1.IsInstanceUpgradeableResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/conscrypt
35,452
common/src/main/java/org/conscrypt/ConscryptEngineSocket.java
/* * Copyright 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.conscrypt; import static org.conscrypt.SSLUtils.EngineStates.STATE_CLOSED; import static org.conscrypt.SSLUtils.EngineStates.STATE_HANDSHAKE_COMPLETED; import static org.conscrypt.SSLUtils.EngineStates.STATE_HANDSHAKE_STARTED; import static org.conscrypt.SSLUtils.EngineStates.STATE_NEW; import static org.conscrypt.SSLUtils.EngineStates.STATE_READY; import static org.conscrypt.SSLUtils.EngineStates.STATE_READY_HANDSHAKE_CUT_THROUGH; import static javax.net.ssl.SSLEngineResult.Status.CLOSED; import static javax.net.ssl.SSLEngineResult.Status.OK; import org.conscrypt.metrics.StatsLog; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.InetAddress; import java.net.Socket; import java.net.SocketException; import java.nio.ByteBuffer; import java.security.PrivateKey; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLEngineResult; import javax.net.ssl.SSLEngineResult.HandshakeStatus; import javax.net.ssl.SSLException; import javax.net.ssl.SSLParameters; import javax.net.ssl.SSLSession; import javax.net.ssl.X509ExtendedTrustManager; import javax.net.ssl.X509KeyManager; import javax.net.ssl.X509TrustManager; import javax.security.auth.x500.X500Principal; /** * Implements crypto handling by delegating to 
{@link ConscryptEngine}. */
class ConscryptEngineSocket extends OpenSSLSocketImpl implements SSLParametersImpl.AliasChooser {
    // Zero-length buffer used to drive the engine through handshake wrap/unwrap
    // steps when there is no application data to process.
    private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0);

    private final ConscryptEngine engine;
    // Guards the socket state field and stream creation.
    private final Object stateLock = new Object();
    // Serializes handshake execution; acquired BEFORE stateLock (see startHandshake).
    private final Object handshakeLock = new Object();

    private SSLOutputStream out;
    private SSLInputStream in;

    // Time (millis-since-boot) when the current handshake started; 0 when no
    // handshake is in flight. Used for handshake-duration stats reporting.
    private long handshakeStartedMillis = 0;

    private BufferAllocator bufferAllocator = ConscryptEngine.getDefaultBufferAllocator();

    // @GuardedBy("stateLock");
    private int state = STATE_NEW;

    // The constructors should not be called except from the Platform class, because we may
    // want to construct a subclass instead.
    ConscryptEngineSocket(SSLParametersImpl sslParameters) throws IOException {
        engine = newEngine(sslParameters, this);
    }

    ConscryptEngineSocket(String hostname, int port, SSLParametersImpl sslParameters)
            throws IOException {
        super(hostname, port);
        engine = newEngine(sslParameters, this);
    }

    ConscryptEngineSocket(InetAddress address, int port, SSLParametersImpl sslParameters)
            throws IOException {
        super(address, port);
        engine = newEngine(sslParameters, this);
    }

    ConscryptEngineSocket(String hostname, int port, InetAddress clientAddress, int clientPort,
            SSLParametersImpl sslParameters) throws IOException {
        super(hostname, port, clientAddress, clientPort);
        engine = newEngine(sslParameters, this);
    }

    ConscryptEngineSocket(InetAddress address, int port, InetAddress clientAddress, int clientPort,
            SSLParametersImpl sslParameters) throws IOException {
        super(address, port, clientAddress, clientPort);
        engine = newEngine(sslParameters, this);
    }

    ConscryptEngineSocket(Socket socket, String hostname, int port, boolean autoClose,
            SSLParametersImpl sslParameters) throws IOException {
        super(socket, hostname, port, autoClose);
        engine = newEngine(sslParameters, this);
    }

    // Builds the ConscryptEngine backing this socket: clones the parameters as needed
    // (SPAKE, or wrapping the trust manager so SSLEngine-receiving check methods are
    // redirected to SSLSocket-receiving ones), registers a handshake-finished listener
    // that forwards to onEngineHandshakeFinished(), and fixes the client/server mode.
    private static ConscryptEngine newEngine(
            SSLParametersImpl sslParameters, final ConscryptEngineSocket socket) {
        SSLParametersImpl modifiedParams;
        if (sslParameters.isSpake()) {
            modifiedParams = sslParameters.cloneWithSpake();
        } else if (Platform.supportsX509ExtendedTrustManager()) {
            modifiedParams = sslParameters.cloneWithTrustManager(
                    getDelegatingTrustManager(sslParameters.getX509TrustManager(), socket));
        } else {
            modifiedParams = sslParameters;
        }
        ConscryptEngine engine =
                new ConscryptEngine(modifiedParams, socket.peerInfoProvider(), socket);

        // When the handshake completes, notify any listeners.
        engine.setHandshakeListener(new HandshakeListener() {
            /**
             * Protected by {@code stateLock}
             */
            @Override
            public void onHandshakeFinished() {
                // Just call the outer class method.
                socket.onEngineHandshakeFinished();
            }
        });

        // Transition the engine state to MODE_SET
        engine.setUseClientMode(sslParameters.getUseClientMode());
        return engine;
    }

    // Returns a trust manager that delegates to the given trust manager, but maps SSLEngine
    // references to the given ConscryptEngineSocket. Our internal engine will call
    // the SSLEngine-receiving methods, but our callers expect the SSLSocket-receiving
    // methods to get called.
    private static X509TrustManager getDelegatingTrustManager(
            final X509TrustManager delegate, final ConscryptEngineSocket socket) {
        if (delegate instanceof X509ExtendedTrustManager) {
            final X509ExtendedTrustManager extendedDelegate = (X509ExtendedTrustManager) delegate;
            return new X509ExtendedTrustManager() {
                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s,
                        Socket socket) throws CertificateException {
                    // The Socket-receiving overloads are never invoked by our engine.
                    throw new AssertionError("Should not be called");
                }
                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s,
                        Socket socket) throws CertificateException {
                    throw new AssertionError("Should not be called");
                }
                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s,
                        SSLEngine sslEngine) throws CertificateException {
                    // Map the SSLEngine-based callback onto the Socket-based delegate method.
                    extendedDelegate.checkClientTrusted(x509Certificates, s, socket);
                }
                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s,
                        SSLEngine sslEngine) throws CertificateException {
                    extendedDelegate.checkServerTrusted(x509Certificates, s, socket);
                }
                @Override
                public void checkClientTrusted(X509Certificate[] x509Certificates, String s)
                        throws CertificateException {
                    extendedDelegate.checkClientTrusted(x509Certificates, s);
                }
                @Override
                public void checkServerTrusted(X509Certificate[] x509Certificates, String s)
                        throws CertificateException {
                    extendedDelegate.checkServerTrusted(x509Certificates, s);
                }
                @Override
                public X509Certificate[] getAcceptedIssuers() {
                    return extendedDelegate.getAcceptedIssuers();
                }
            };
        }
        return delegate;
    }

    @Override
    public final SSLParameters getSSLParameters() {
        return engine.getSSLParameters();
    }

    @Override
    public final void setSSLParameters(SSLParameters sslParameters) {
        engine.setSSLParameters(sslParameters);
    }

    @Override
    public final void startHandshake() throws IOException {
        checkOpen();

        try {
            synchronized (handshakeLock) {
                // Only lock stateLock when we begin the handshake. This is done so that we don't
                // hold the stateLock when we invoke the handshake completion listeners.
                synchronized (stateLock) {
                    // Initialize the handshake if we haven't already.
                    if (state == STATE_NEW) {
                        transitionTo(STATE_HANDSHAKE_STARTED);
                        engine.beginHandshake();
                        createInputStream();
                        createOutputStream();
                    } else {
                        // We've either started the handshake already or have been closed.
                        // Do nothing in both cases.
                        //
                        // NOTE: BoringSSL does not support initiating renegotiation, so we always
                        // ignore addition handshake calls.
                        return;
                    }
                }

                doHandshake();
            }
        } catch (IOException e) {
            close();
            throw e;
        } catch (Exception e) {
            close();
            // Convert anything else to a handshake exception.
            throw SSLUtils.toSSLHandshakeException(e);
        }
    }

    // Drives the engine's handshake state machine to completion, pumping wrap/unwrap
    // steps over the socket streams, then fires handshake-completed listeners.
    // Caller must hold handshakeLock (see startHandshake / renegotiate).
    private void doHandshake() throws IOException {
        try {
            boolean finished = false;
            while (!finished) {
                switch (engine.getHandshakeStatus()) {
                    case NEED_UNWRAP:
                        if (in.processDataFromSocket(EmptyArray.BYTE, 0, 0) < 0) {
                            // Can't complete the handshake due to EOF.
                            close();
                            throw SSLUtils.toSSLHandshakeException(
                                    new EOFException("connection closed"));
                        }
                        break;
                    case NEED_WRAP: {
                        out.writeInternal(EMPTY_BUFFER);
                        // Always flush handshake frames immediately.
                        out.flushInternal();
                        break;
                    }
                    case NEED_TASK: {
                        // Should never get here, since our engine never provides tasks.
                        close();
                        throw new IllegalStateException("Engine tasks are unsupported");
                    }
                    case NOT_HANDSHAKING:
                    case FINISHED: {
                        // Handshake is complete.
                        finished = true;
                        break;
                    }
                    default: {
                        throw new IllegalStateException(
                                "Unknown handshake status: " + engine.getHandshakeStatus());
                    }
                }
            }
            if (isState(STATE_HANDSHAKE_COMPLETED)) {
                // STATE_READY_HANDSHAKE_CUT_THROUGH will wake up any waiting threads which can
                // race with the listeners, but that's OK.
                transitionTo(STATE_READY_HANDSHAKE_CUT_THROUGH);
                notifyHandshakeCompletedListeners();
                transitionTo(STATE_READY);
            }
        } catch (SSLException e) {
            // Try to flush a pending close_notify/alert before tearing down.
            drainOutgoingQueue();
            close();
            throw e;
        } catch (IOException e) {
            close();
            throw e;
        } catch (Exception e) {
            close();
            // Convert anything else to a handshake exception.
            throw SSLUtils.toSSLHandshakeException(e);
        }
    }

    // Returns true iff the socket is currently in the given state.
    private boolean isState(int desiredState) {
        synchronized (stateLock) {
            return state == desiredState;
        }
    }

    // Moves the socket to newState, recording handshake timing stats on the
    // relevant transitions and waking threads blocked in waitForHandshake()
    // when the new state is one they wait for. Returns the previous state.
    private int transitionTo(int newState) {
        synchronized (stateLock) {
            if (state == newState) {
                return state;
            }
            int previousState = state;
            boolean notify = false;
            switch (newState) {
                case STATE_HANDSHAKE_STARTED:
                    handshakeStartedMillis = Platform.getMillisSinceBoot();
                    break;
                case STATE_READY_HANDSHAKE_CUT_THROUGH:
                    if (handshakeStartedMillis > 0) {
                        // Handshake succeeded: log protocol, cipher and duration.
                        StatsLog statsLog = Platform.getStatsLog();
                        statsLog.countTlsHandshake(true, engine.getSession().getProtocol(),
                                engine.getSession().getCipherSuite(),
                                Platform.getMillisSinceBoot() - handshakeStartedMillis);
                        handshakeStartedMillis = 0;
                    }
                    notify = true;
                    break;
                case STATE_READY:
                    notify = true;
                    break;
                case STATE_CLOSED:
                    if (handshakeStartedMillis > 0) {
                        StatsLog statsLog = Platform.getStatsLog();
                        // Handshake was in progress and so must have failed.
                        statsLog.countTlsHandshake(false, "TLS_PROTO_FAILED", "TLS_CIPHER_FAILED",
                                Platform.getMillisSinceBoot() - handshakeStartedMillis);
                        handshakeStartedMillis = 0;
                    }
                    notify = true;
                    break;
                default:
                    break;
            }
            state = newState;
            if (notify) {
                stateLock.notifyAll();
            }
            return previousState;
        }
    }

    @Override
    public final InputStream getInputStream() throws IOException {
        checkOpen();
        return createInputStream();
    }

    // Lazily creates the decrypting input stream (at most one per socket).
    private SSLInputStream createInputStream() {
        synchronized (stateLock) {
            if (in == null) {
                in = new SSLInputStream();
            }
        }
        return in;
    }

    @Override
    public final OutputStream getOutputStream() throws IOException {
        checkOpen();
        return createOutputStream();
    }

    // Lazily creates the encrypting output stream (at most one per socket).
    private SSLOutputStream createOutputStream() {
        synchronized (stateLock) {
            if (out == null) {
                out = new SSLOutputStream();
            }
        }
        return out;
    }

    @Override
    public final SSLSession getHandshakeSession() {
        return engine.handshakeSession();
    }

    @Override
    public final SSLSession getSession() {
        if (isConnected()) {
            try {
                waitForHandshake();
            } catch (IOException e) {
                // Fall through
            }
        }
        return engine.getSession();
    }

    @Override
    final SSLSession getActiveSession() {
        return engine.getSession();
    }

    @Override
    public final boolean getEnableSessionCreation() {
        return engine.getEnableSessionCreation();
    }

    @Override
    public final void setEnableSessionCreation(boolean flag) {
        engine.setEnableSessionCreation(flag);
    }

    @Override
    public final String[] getSupportedCipherSuites() {
        return engine.getSupportedCipherSuites();
    }

    @Override
    public final String[] getEnabledCipherSuites() {
        return engine.getEnabledCipherSuites();
    }

    @Override
    public final void setEnabledCipherSuites(String[] suites) {
        engine.setEnabledCipherSuites(suites);
    }

    @Override
    public final String[] getSupportedProtocols() {
        return engine.getSupportedProtocols();
    }

    @Override
    public final String[] getEnabledProtocols() {
        return engine.getEnabledProtocols();
    }

    @Override
    public final void setEnabledProtocols(String[] protocols) {
        engine.setEnabledProtocols(protocols);
    }

    /**
     * This method enables Server Name Indication. If the hostname is not a valid SNI hostname,
     * the SNI extension will be omitted from the handshake.
     *
     * @param hostname the desired SNI hostname, or null to disable
     */
    @Override
    public final void setHostname(String hostname) {
        engine.setHostname(hostname);
        super.setHostname(hostname);
    }

    @Override
    public final void setUseSessionTickets(boolean useSessionTickets) {
        engine.setUseSessionTickets(useSessionTickets);
    }

    @Override
    public final void setChannelIdEnabled(boolean enabled) {
        engine.setChannelIdEnabled(enabled);
    }

    @Override
    public final byte[] getChannelId() throws SSLException {
        return engine.getChannelId();
    }

    @Override
    public final void setChannelIdPrivateKey(PrivateKey privateKey) {
        engine.setChannelIdPrivateKey(privateKey);
    }

    @Override
    byte[] getTlsUnique() {
        return engine.getTlsUnique();
    }

    @Override
    byte[] exportKeyingMaterial(String label, byte[] context, int length) throws SSLException {
        return engine.exportKeyingMaterial(label, context, length);
    }

    @Override
    public final boolean getUseClientMode() {
        return engine.getUseClientMode();
    }

    @Override
    public final void setUseClientMode(boolean mode) {
        engine.setUseClientMode(mode);
    }

    @Override
    public final boolean getWantClientAuth() {
        return engine.getWantClientAuth();
    }

    @Override
    public final boolean getNeedClientAuth() {
        return engine.getNeedClientAuth();
    }

    @Override
    public final void setNeedClientAuth(boolean need) {
        engine.setNeedClientAuth(need);
    }

    @Override
    public final void setWantClientAuth(boolean want) {
        engine.setWantClientAuth(want);
    }

    @Override
    @SuppressWarnings("UnsynchronizedOverridesSynchronized")
    public final void close() throws IOException {
        // TODO: Close SSL sockets using a background thread so they close gracefully.

        if (stateLock == null) {
            // Constructor failed, e.g. superclass constructor called close()
            return;
        }

        int previousState = transitionTo(STATE_CLOSED);
        if (previousState == STATE_CLOSED) {
            // Already closed; close() is idempotent.
            return;
        }

        try {
            // Close the engine.
            engine.closeInbound();
            engine.closeOutbound();

            // Closing the outbound direction of a connected engine will trigger a TLS close
            // notify, which we should try and send.
            // If we don't, then closeOutbound won't be able to free resources because there are
            // bytes queued for transmission so drain the queue those and call closeOutbound a
            // second time.
            if (previousState >= STATE_HANDSHAKE_STARTED) {
                drainOutgoingQueue();
                engine.closeOutbound();
            }
        } finally {
            // In case of an exception thrown while closing the engine, we still need to close the
            // underlying socket and release any resources the input stream is holding.
            try {
                super.close();
            } finally {
                if (in != null) {
                    in.release();
                }
            }
        }
    }

    @Override
    public void setHandshakeTimeout(int handshakeTimeoutMilliseconds) throws SocketException {
        // Not supported but ignored rather than throwing for compatibility: b/146041327
    }

    @Override
    final void setApplicationProtocols(String[] protocols) {
        engine.setApplicationProtocols(protocols);
    }

    @Override
    final String[] getApplicationProtocols() {
        return engine.getApplicationProtocols();
    }

    @Override
    public final String getApplicationProtocol() {
        return engine.getApplicationProtocol();
    }

    @Override
    public final String getHandshakeApplicationProtocol() {
        return engine.getHandshakeApplicationProtocol();
    }

    @Override
    public final void setApplicationProtocolSelector(ApplicationProtocolSelector selector) {
        setApplicationProtocolSelector(
                selector == null ? null : new ApplicationProtocolSelectorAdapter(this, selector));
    }

    @Override
    final void setApplicationProtocolSelector(ApplicationProtocolSelectorAdapter selector) {
        engine.setApplicationProtocolSelector(selector);
    }

    // Overrides the allocator used for the input stream's decrypted-data buffer.
    // NOTE(review): only affects an SSLInputStream created after this call — confirm
    // callers set this before the first read.
    void setBufferAllocator(BufferAllocator bufferAllocator) {
        engine.setBufferAllocator(bufferAllocator);
        this.bufferAllocator = bufferAllocator;
    }

    private void onEngineHandshakeFinished() {
        // Don't do anything here except change state. This method will be called from
        // e.g. wrap() which is non re-entrant so we can't call anything that might do
        // IO until after it exits, e.g. in doHandshake().
        if (isState(STATE_HANDSHAKE_STARTED)) {
            transitionTo(STATE_HANDSHAKE_COMPLETED);
        }
    }

    /**
     * Waits for the handshake to complete.
     */
    private void waitForHandshake() throws IOException {
        startHandshake();
        synchronized (stateLock) {
            while (state != STATE_READY
                    // Waiting threads are allowed to compete with handshake listeners for access.
                    && state != STATE_READY_HANDSHAKE_CUT_THROUGH && state != STATE_CLOSED) {
                try {
                    stateLock.wait();
                } catch (InterruptedException e) {
                    // Re-assert the interrupt and surface it as an IOException.
                    Thread.currentThread().interrupt();
                    throw new IOException("Interrupted waiting for handshake", e);
                }
            }

            if (state == STATE_CLOSED) {
                throw new SocketException("Socket is closed");
            }
        }
    }

    // Best-effort flush of any encrypted bytes (e.g. close_notify) still queued
    // in the engine; I/O failures are deliberately swallowed since this runs on
    // error/close paths.
    private void drainOutgoingQueue() {
        try {
            while (engine.pendingOutboundEncryptedBytes() > 0) {
                out.writeInternal(EMPTY_BUFFER);
                // Always flush handshake frames immediately.
                out.flushInternal();
            }
        } catch (IOException e) {
            // Ignore
        }
    }

    // Raw (plaintext-side of the transport) stream of the underlying socket.
    private OutputStream getUnderlyingOutputStream() throws IOException {
        return super.getOutputStream();
    }

    private InputStream getUnderlyingInputStream() throws IOException {
        return super.getInputStream();
    }

    @Override
    public final String chooseServerAlias(X509KeyManager keyManager, String keyType) {
        return keyManager.chooseServerAlias(keyType, null, this);
    }

    @Override
    public final String chooseClientAlias(
            X509KeyManager keyManager, X500Principal[] issuers, String[] keyTypes) {
        return keyManager.chooseClientAlias(keyTypes, issuers, this);
    }

    /**
     * Wrap bytes written to the underlying socket.
     */
    private final class SSLOutputStream extends OutputStream {
        private final Object writeLock = new Object();
        // Scratch buffer holding one wrapped (encrypted) TLS record at a time.
        private final ByteBuffer target;
        private final int targetArrayOffset;
        private OutputStream socketOutputStream;

        SSLOutputStream() {
            target = ByteBuffer.allocate(engine.getSession().getPacketBufferSize());
            targetArrayOffset = target.arrayOffset();
        }

        @Override
        public void close() throws IOException {
            // Closing the stream closes the whole socket.
            ConscryptEngineSocket.this.close();
        }

        @Override
        public void write(int b) throws IOException {
            waitForHandshake();
            synchronized (writeLock) {
                write(new byte[] {(byte) b});
            }
        }

        @Override
        public void write(byte[] b) throws IOException {
            waitForHandshake();
            synchronized (writeLock) {
                writeInternal(ByteBuffer.wrap(b));
            }
        }

        @Override
        public void write(byte[] b, int off, int len) throws IOException {
            waitForHandshake();
            synchronized (writeLock) {
                writeInternal(ByteBuffer.wrap(b, off, len));
            }
        }

        // Wraps `buffer` through the engine one record at a time and writes each
        // encrypted record to the socket. Also used with EMPTY_BUFFER to emit
        // handshake records.
        private void writeInternal(ByteBuffer buffer) throws IOException {
            Platform.blockGuardOnNetwork();
            checkOpen();
            init();

            // Need to loop through at least once to enable handshaking where no application
            // bytes are processed.
            int len = buffer.remaining();
            SSLEngineResult engineResult;
            do {
                target.clear();
                engineResult = engine.wrap(buffer, target);
                if (engineResult.getStatus() != OK && engineResult.getStatus() != CLOSED) {
                    throw new SSLException("Unexpected engine result " + engineResult.getStatus());
                }
                if (target.position() != engineResult.bytesProduced()) {
                    throw new SSLException("Engine bytesProduced " + engineResult.bytesProduced()
                            + " does not match bytes written " + target.position());
                }
                len -= engineResult.bytesConsumed();
                if (len != buffer.remaining()) {
                    throw new SSLException("Engine did not read the correct number of bytes");
                }
                if (engineResult.getStatus() == CLOSED && engineResult.bytesProduced() == 0) {
                    if (len > 0) {
                        // Engine closed with unconsumed application data left.
                        throw new SocketException("Socket closed");
                    }
                    break;
                }

                target.flip();

                // Write the data to the socket.
                writeToSocket();
            } while (len > 0);
        }

        @Override
        public void flush() throws IOException {
            waitForHandshake();
            synchronized (writeLock) {
                flushInternal();
            }
        }

        private void flushInternal() throws IOException {
            checkOpen();
            init();
            socketOutputStream.flush();
        }

        // Lazily binds to the underlying socket's output stream.
        private void init() throws IOException {
            if (socketOutputStream == null) {
                socketOutputStream = getUnderlyingOutputStream();
            }
        }

        private void writeToSocket() throws IOException {
            // Write the data to the socket.
            socketOutputStream.write(target.array(), targetArrayOffset, target.limit());
        }
    }

    /**
     * Unwrap bytes read from the underlying socket.
     */
    private final class SSLInputStream extends InputStream {
        private final Object readLock = new Object();
        private final byte[] singleByte = new byte[1];
        // Decrypted application data produced by the engine, ready to serve to readers.
        private final ByteBuffer fromEngine;
        // Encrypted bytes read from the socket, awaiting unwrap.
        private final ByteBuffer fromSocket;
        private final int fromSocketArrayOffset;
        private final AllocatedBuffer allocatedBuffer;
        private InputStream socketInputStream;

        SSLInputStream() {
            if (bufferAllocator != null) {
                allocatedBuffer = bufferAllocator.allocateDirectBuffer(
                        engine.getSession().getApplicationBufferSize());
                fromEngine = allocatedBuffer.nioBuffer();
            } else {
                allocatedBuffer = null;
                fromEngine =
                        ByteBuffer.allocateDirect(engine.getSession().getApplicationBufferSize());
            }
            // Initially fromEngine.remaining() == 0.
            fromEngine.flip();
            fromSocket = ByteBuffer.allocate(engine.getSession().getPacketBufferSize());
            fromSocketArrayOffset = fromSocket.arrayOffset();
        }

        @Override
        public void close() throws IOException {
            // Closing the stream closes the whole socket.
            ConscryptEngineSocket.this.close();
        }

        // Returns the allocated direct buffer (if any) to its allocator; called
        // from the socket's close() path.
        void release() {
            synchronized (readLock) {
                if (allocatedBuffer != null) {
                    allocatedBuffer.release();
                }
            }
        }

        @Override
        public int read() throws IOException {
            waitForHandshake();
            synchronized (readLock) {
                // Handle returning of -1 if EOF is reached.
                int count = read(singleByte, 0, 1);
                if (count == -1) {
                    // Handle EOF.
                    return -1;
                }
                if (count != 1) {
                    throw new SSLException("read incorrect number of bytes " + count);
                }
                return singleByte[0] & 0xff;
            }
        }

        @Override
        public int read(byte[] b) throws IOException {
            waitForHandshake();
            synchronized (readLock) {
                return read(b, 0, b.length);
            }
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            waitForHandshake();
            if (len == 0) {
                return 0;
            }
            synchronized (readLock) {
                return readUntilDataAvailable(b, off, len);
            }
        }

        @Override
        public int available() throws IOException {
            waitForHandshake();
            synchronized (readLock) {
                init();
                // Only counts already-decrypted bytes; bytes still in fromSocket
                // or in the kernel are not included.
                return fromEngine.remaining();
            }
        }

        private boolean isHandshaking(HandshakeStatus status) {
            switch(status) {
                case NEED_TASK:
                case NEED_WRAP:
                case NEED_UNWRAP:
                    return true;
                default:
                    return false;
            }
        }

        // Blocks until at least one byte of application data (or EOF) is available.
        // processDataFromSocket returns 0 for records that carry no app data.
        private int readUntilDataAvailable(byte[] b, int off, int len) throws IOException {
            int count;
            do {
                count = processDataFromSocket(b, off, len);
            } while (count == 0);
            return count;
        }

        // Returns any decrypted data from the engine.  If no data is currently present in the
        // engine's output buffer, reads from the input socket until the engine has processed
        // at least one TLS record, then returns any data in the output buffer or 0 if no
        // data is available.  This is used both during handshaking (in which case, the records
        // will produce no data and this method will return 0) and by the InputStream read()
        // methods that expect records to produce application data.
        private int processDataFromSocket(byte[] b, int off, int len) throws IOException {
            Platform.blockGuardOnNetwork();
            checkOpen();

            // Make sure the input stream has been created.
            init();

            for (;;) {
                // Serve any remaining data from the engine first.
                if (fromEngine.remaining() > 0) {
                    int readFromEngine = Math.min(fromEngine.remaining(), len);
                    fromEngine.get(b, off, readFromEngine);
                    return readFromEngine;
                }

                // Try to unwrap any data already in the socket buffer.
                boolean needMoreDataFromSocket = true;

                // Unwrap the encrypted bytes from the socket buffer into the engine
                // (plaintext) buffer.
                fromSocket.flip();
                fromEngine.clear();

                boolean engineHandshaking = isHandshaking(engine.getHandshakeStatus());
                SSLEngineResult engineResult = engine.unwrap(fromSocket, fromEngine);

                // Shift any remaining data to the beginning of the buffer so that
                // we can accommodate the next full packet. After this is called,
                // limit will be restored to capacity and position will point just
                // past the end of the data.
                fromSocket.compact();
                fromEngine.flip();

                switch (engineResult.getStatus()) {
                    case BUFFER_UNDERFLOW: {
                        if (engineResult.bytesProduced() == 0) {
                            // Need to read more data from the socket.
                            break;
                        }
                        // Also serve the data that was produced.
                        needMoreDataFromSocket = false;
                        break;
                    }
                    case OK: {
                        // We processed the entire packet successfully...
                        if (!engineHandshaking && isHandshaking(engineResult.getHandshakeStatus())
                                && isHandshakeFinished()) {
                            // The received packet is the beginning of a renegotiation handshake.
                            // Perform another handshake.
                            renegotiate();
                            return 0;
                        }
                        needMoreDataFromSocket = false;
                        break;
                    }
                    case CLOSED: {
                        // EOF
                        return -1;
                    }
                    default: {
                        // Anything else is an error.
                        throw new SSLException(
                                "Unexpected engine result " + engineResult.getStatus());
                    }
                }

                if (!needMoreDataFromSocket && engineResult.bytesProduced() == 0) {
                    // Read successfully, but produced no data. Possibly part of a
                    // handshake.
                    return 0;
                }

                // Read more data from the socket.
                if (needMoreDataFromSocket && readFromSocket() == -1) {
                    // Failed to read the next encrypted packet before reaching EOF.
                    return -1;
                }

                // Continue the loop and return the data from the engine buffer.
            }
        }

        private boolean isHandshakeFinished() {
            synchronized (stateLock) {
                return state > STATE_HANDSHAKE_STARTED;
            }
        }

        /**
         * Processes a renegotiation received from the remote peer.
         */
        private void renegotiate() throws IOException {
            // Take handshakeLock (never stateLock first) to serialize with startHandshake().
            synchronized (handshakeLock) {
                doHandshake();
            }
        }

        // Lazily binds to the underlying socket's input stream.
        private void init() throws IOException {
            if (socketInputStream == null) {
                socketInputStream = getUnderlyingInputStream();
            }
        }

        // Reads encrypted bytes from the socket into fromSocket; returns the count
        // read, or -1 on EOF (including an EOFException from the underlying stream).
        private int readFromSocket() throws IOException {
            try {
                // Read directly to the underlying array and increment the buffer position if
                // appropriate.
                int pos = fromSocket.position();
                int lim = fromSocket.limit();
                int read = socketInputStream.read(
                        fromSocket.array(), fromSocketArrayOffset + pos, lim - pos);
                if (read > 0) {
                    fromSocket.position(pos + read);
                }
                return read;
            } catch (EOFException e) {
                return -1;
            }
        }
    }
}
googleapis/google-cloud-java
35,348
java-datastream/proto-google-cloud-datastream-v1alpha1/src/main/java/com/google/cloud/datastream/v1alpha1/DeletePrivateConnectionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datastream/v1alpha1/datastream.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datastream.v1alpha1; /** Protobuf type {@code google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest} */ public final class DeletePrivateConnectionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) DeletePrivateConnectionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use DeletePrivateConnectionRequest.newBuilder() to construct. 
private DeletePrivateConnectionRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private DeletePrivateConnectionRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new DeletePrivateConnectionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto .internal_static_google_cloud_datastream_v1alpha1_DeletePrivateConnectionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto .internal_static_google_cloud_datastream_v1alpha1_DeletePrivateConnectionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest.class, com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. 
The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FORCE_FIELD_NUMBER = 3; private boolean force_ = false; /** * * * <pre> * Optional. If set to true, any child routes that belong to this PrivateConnection will * also be deleted. * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The force. 
*/ @java.lang.Override public boolean getForce() { return force_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (force_ != false) { output.writeBool(3, force_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (force_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, force_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest)) { return super.equals(obj); } com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest other = (com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getForce() != other.getForce()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + FORCE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getForce()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder 
newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** Protobuf type {@code google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto .internal_static_google_cloud_datastream_v1alpha1_DeletePrivateConnectionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto .internal_static_google_cloud_datastream_v1alpha1_DeletePrivateConnectionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest.class, com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest.Builder.class); } // Construct using // com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; force_ 
= false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto .internal_static_google_cloud_datastream_v1alpha1_DeletePrivateConnectionRequest_descriptor; } @java.lang.Override public com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest getDefaultInstanceForType() { return com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest build() { com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest buildPartial() { com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest result = new com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.force_ = force_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) { return mergeFrom( (com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest other) { if (other == com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getForce() != false) { setForce(other.getForce()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { force_ = input.readBool(); bitField0_ |= 
0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the private connectivity configuration to delete. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. 
*/ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. 
If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. A request ID to identify requests. Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. A request ID to identify requests. 
Specify a unique request ID * so that if you must retry your request, the server will know to ignore * the request if it has already been completed. The server will guarantee * that for at least 60 minutes after the first request. * * For example, consider a situation where you make an initial request and the * request times out. If you make the request again with the same request ID, * the server can check if original operation with the same request ID was * received, and if so, will ignore the second request. This prevents clients * from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean force_; /** * * * <pre> * Optional. If set to true, any child routes that belong to this PrivateConnection will * also be deleted. * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The force. */ @java.lang.Override public boolean getForce() { return force_; } /** * * * <pre> * Optional. If set to true, any child routes that belong to this PrivateConnection will * also be deleted. * </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The force to set. * @return This builder for chaining. */ public Builder setForce(boolean value) { force_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set to true, any child routes that belong to this PrivateConnection will * also be deleted. 
* </pre> * * <code>bool force = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearForce() { bitField0_ = (bitField0_ & ~0x00000004); force_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest) private static final com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest(); } public static com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<DeletePrivateConnectionRequest> PARSER = new com.google.protobuf.AbstractParser<DeletePrivateConnectionRequest>() { @java.lang.Override public DeletePrivateConnectionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<DeletePrivateConnectionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<DeletePrivateConnectionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datastream.v1alpha1.DeletePrivateConnectionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,361
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/BackendBucketCdnPolicyCacheKeyPolicy.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * Message containing what to include in the cache key for a request for Cloud CDN. * </pre> * * Protobuf type {@code google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy} */ public final class BackendBucketCdnPolicyCacheKeyPolicy extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) BackendBucketCdnPolicyCacheKeyPolicyOrBuilder { private static final long serialVersionUID = 0L; // Use BackendBucketCdnPolicyCacheKeyPolicy.newBuilder() to construct. 
private BackendBucketCdnPolicyCacheKeyPolicy( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BackendBucketCdnPolicyCacheKeyPolicy() { includeHttpHeaders_ = com.google.protobuf.LazyStringArrayList.emptyList(); queryStringWhitelist_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BackendBucketCdnPolicyCacheKeyPolicy(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_BackendBucketCdnPolicyCacheKeyPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_BackendBucketCdnPolicyCacheKeyPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.class, com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.Builder.class); } public static final int INCLUDE_HTTP_HEADERS_FIELD_NUMBER = 2489606; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList includeHttpHeaders_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @return A list containing the includeHttpHeaders. */ public com.google.protobuf.ProtocolStringList getIncludeHttpHeadersList() { return includeHttpHeaders_; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @return The count of includeHttpHeaders. 
*/ public int getIncludeHttpHeadersCount() { return includeHttpHeaders_.size(); } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param index The index of the element to return. * @return The includeHttpHeaders at the given index. */ public java.lang.String getIncludeHttpHeaders(int index) { return includeHttpHeaders_.get(index); } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param index The index of the value to return. * @return The bytes of the includeHttpHeaders at the given index. */ public com.google.protobuf.ByteString getIncludeHttpHeadersBytes(int index) { return includeHttpHeaders_.getByteString(index); } public static final int QUERY_STRING_WHITELIST_FIELD_NUMBER = 52456496; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList queryStringWhitelist_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @return A list containing the queryStringWhitelist. */ public com.google.protobuf.ProtocolStringList getQueryStringWhitelistList() { return queryStringWhitelist_; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @return The count of queryStringWhitelist. */ public int getQueryStringWhitelistCount() { return queryStringWhitelist_.size(); } /** * * * <pre> * Names of query string parameters to include in cache keys. 
Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param index The index of the element to return. * @return The queryStringWhitelist at the given index. */ public java.lang.String getQueryStringWhitelist(int index) { return queryStringWhitelist_.get(index); } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param index The index of the value to return. * @return The bytes of the queryStringWhitelist at the given index. */ public com.google.protobuf.ByteString getQueryStringWhitelistBytes(int index) { return queryStringWhitelist_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < includeHttpHeaders_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString( output, 2489606, includeHttpHeaders_.getRaw(i)); } for (int i = 0; i < queryStringWhitelist_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString( output, 52456496, queryStringWhitelist_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; { int dataSize = 0; for (int i = 0; i < includeHttpHeaders_.size(); i++) { dataSize += computeStringSizeNoTag(includeHttpHeaders_.getRaw(i)); } size += dataSize; size += 4 * getIncludeHttpHeadersList().size(); } { int 
dataSize = 0; for (int i = 0; i < queryStringWhitelist_.size(); i++) { dataSize += computeStringSizeNoTag(queryStringWhitelist_.getRaw(i)); } size += dataSize; size += 5 * getQueryStringWhitelistList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy)) { return super.equals(obj); } com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy other = (com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) obj; if (!getIncludeHttpHeadersList().equals(other.getIncludeHttpHeadersList())) return false; if (!getQueryStringWhitelistList().equals(other.getQueryStringWhitelistList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getIncludeHttpHeadersCount() > 0) { hash = (37 * hash) + INCLUDE_HTTP_HEADERS_FIELD_NUMBER; hash = (53 * hash) + getIncludeHttpHeadersList().hashCode(); } if (getQueryStringWhitelistCount() > 0) { hash = (37 * hash) + QUERY_STRING_WHITELIST_FIELD_NUMBER; hash = (53 * hash) + getQueryStringWhitelistList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message containing what to include in the cache key for a request for Cloud CDN. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicyOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_BackendBucketCdnPolicyCacheKeyPolicy_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_BackendBucketCdnPolicyCacheKeyPolicy_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.class, com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.Builder.class); } // Construct using com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; includeHttpHeaders_ = com.google.protobuf.LazyStringArrayList.emptyList(); queryStringWhitelist_ = com.google.protobuf.LazyStringArrayList.emptyList(); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_BackendBucketCdnPolicyCacheKeyPolicy_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy getDefaultInstanceForType() { return com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy build() { com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy buildPartial() { com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy result = new com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { includeHttpHeaders_.makeImmutable(); result.includeHttpHeaders_ = includeHttpHeaders_; } if (((from_bitField0_ & 0x00000002) != 0)) { queryStringWhitelist_.makeImmutable(); result.queryStringWhitelist_ = queryStringWhitelist_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other 
instanceof com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) { return mergeFrom((com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy other) { if (other == com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy.getDefaultInstance()) return this; if (!other.includeHttpHeaders_.isEmpty()) { if (includeHttpHeaders_.isEmpty()) { includeHttpHeaders_ = other.includeHttpHeaders_; bitField0_ |= 0x00000001; } else { ensureIncludeHttpHeadersIsMutable(); includeHttpHeaders_.addAll(other.includeHttpHeaders_); } onChanged(); } if (!other.queryStringWhitelist_.isEmpty()) { if (queryStringWhitelist_.isEmpty()) { queryStringWhitelist_ = other.queryStringWhitelist_; bitField0_ |= 0x00000002; } else { ensureQueryStringWhitelistIsMutable(); queryStringWhitelist_.addAll(other.queryStringWhitelist_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 19916850: { java.lang.String s = input.readStringRequireUtf8(); ensureIncludeHttpHeadersIsMutable(); includeHttpHeaders_.add(s); break; } // case 19916850 case 419651970: { java.lang.String s = input.readStringRequireUtf8(); ensureQueryStringWhitelistIsMutable(); queryStringWhitelist_.add(s); break; } // case 419651970 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) 
} // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.LazyStringArrayList includeHttpHeaders_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureIncludeHttpHeadersIsMutable() { if (!includeHttpHeaders_.isModifiable()) { includeHttpHeaders_ = new com.google.protobuf.LazyStringArrayList(includeHttpHeaders_); } bitField0_ |= 0x00000001; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @return A list containing the includeHttpHeaders. */ public com.google.protobuf.ProtocolStringList getIncludeHttpHeadersList() { includeHttpHeaders_.makeImmutable(); return includeHttpHeaders_; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @return The count of includeHttpHeaders. */ public int getIncludeHttpHeadersCount() { return includeHttpHeaders_.size(); } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param index The index of the element to return. * @return The includeHttpHeaders at the given index. */ public java.lang.String getIncludeHttpHeaders(int index) { return includeHttpHeaders_.get(index); } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param index The index of the value to return. * @return The bytes of the includeHttpHeaders at the given index. 
*/ public com.google.protobuf.ByteString getIncludeHttpHeadersBytes(int index) { return includeHttpHeaders_.getByteString(index); } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param index The index to set the value at. * @param value The includeHttpHeaders to set. * @return This builder for chaining. */ public Builder setIncludeHttpHeaders(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureIncludeHttpHeadersIsMutable(); includeHttpHeaders_.set(index, value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param value The includeHttpHeaders to add. * @return This builder for chaining. */ public Builder addIncludeHttpHeaders(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureIncludeHttpHeadersIsMutable(); includeHttpHeaders_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param values The includeHttpHeaders to add. * @return This builder for chaining. */ public Builder addAllIncludeHttpHeaders(java.lang.Iterable<java.lang.String> values) { ensureIncludeHttpHeadersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, includeHttpHeaders_); bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @return This builder for chaining. 
*/ public Builder clearIncludeHttpHeaders() { includeHttpHeaders_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); ; onChanged(); return this; } /** * * * <pre> * Allows HTTP request headers (by name) to be used in the cache key. * </pre> * * <code>repeated string include_http_headers = 2489606;</code> * * @param value The bytes of the includeHttpHeaders to add. * @return This builder for chaining. */ public Builder addIncludeHttpHeadersBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureIncludeHttpHeadersIsMutable(); includeHttpHeaders_.add(value); bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList queryStringWhitelist_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureQueryStringWhitelistIsMutable() { if (!queryStringWhitelist_.isModifiable()) { queryStringWhitelist_ = new com.google.protobuf.LazyStringArrayList(queryStringWhitelist_); } bitField0_ |= 0x00000002; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @return A list containing the queryStringWhitelist. */ public com.google.protobuf.ProtocolStringList getQueryStringWhitelistList() { queryStringWhitelist_.makeImmutable(); return queryStringWhitelist_; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @return The count of queryStringWhitelist. 
*/ public int getQueryStringWhitelistCount() { return queryStringWhitelist_.size(); } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param index The index of the element to return. * @return The queryStringWhitelist at the given index. */ public java.lang.String getQueryStringWhitelist(int index) { return queryStringWhitelist_.get(index); } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param index The index of the value to return. * @return The bytes of the queryStringWhitelist at the given index. */ public com.google.protobuf.ByteString getQueryStringWhitelistBytes(int index) { return queryStringWhitelist_.getByteString(index); } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param index The index to set the value at. * @param value The queryStringWhitelist to set. * @return This builder for chaining. */ public Builder setQueryStringWhitelist(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureQueryStringWhitelistIsMutable(); queryStringWhitelist_.set(index, value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. 
* </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param value The queryStringWhitelist to add. * @return This builder for chaining. */ public Builder addQueryStringWhitelist(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureQueryStringWhitelistIsMutable(); queryStringWhitelist_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param values The queryStringWhitelist to add. * @return This builder for chaining. */ public Builder addAllQueryStringWhitelist(java.lang.Iterable<java.lang.String> values) { ensureQueryStringWhitelistIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, queryStringWhitelist_); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @return This builder for chaining. */ public Builder clearQueryStringWhitelist() { queryStringWhitelist_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); ; onChanged(); return this; } /** * * * <pre> * Names of query string parameters to include in cache keys. Default parameters are always included. '&amp;' and '=' will be percent encoded and not treated as delimiters. * </pre> * * <code>repeated string query_string_whitelist = 52456496;</code> * * @param value The bytes of the queryStringWhitelist to add. * @return This builder for chaining. 
*/ public Builder addQueryStringWhitelistBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureQueryStringWhitelistIsMutable(); queryStringWhitelist_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy) private static final com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy(); } public static com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BackendBucketCdnPolicyCacheKeyPolicy> PARSER = new com.google.protobuf.AbstractParser<BackendBucketCdnPolicyCacheKeyPolicy>() { @java.lang.Override public BackendBucketCdnPolicyCacheKeyPolicy parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw 
new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BackendBucketCdnPolicyCacheKeyPolicy> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BackendBucketCdnPolicyCacheKeyPolicy> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.BackendBucketCdnPolicyCacheKeyPolicy getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/lucene
35,410
lucene/suggest/src/java/org/apache/lucene/search/suggest/analyzing/AnalyzingInfixSuggester.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.lucene.search.suggest.analyzing; import java.io.Closeable; import java.io.IOException; import java.io.StringReader; import java.nio.file.Path; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.AnalyzerWrapper; import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.document.TextField; import 
org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.suggest.InputIterator; import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.BytesRef; // TODO: // - a PostingsFormat that stores super-high-freq terms as // a bitset should be a win for the prefix terms? // (LUCENE-5052) // - we could offer a better integration with // DocumentDictionary and NRT? so that your suggester // "automatically" keeps in sync w/ your index /** * Analyzes the input text and then suggests matches based on prefix matches to any tokens in the * indexed text. This also highlights the tokens that match. * * <p>This suggester supports payloads. Matches are sorted only by the suggest weight; it would be * nice to support blended score + weight sort in the future. 
This means this suggester best applies * when there is a strong a-priori ranking of all the suggestions. * * <p>This suggester supports contexts, including arbitrary binary terms. * * @lucene.experimental */ public class AnalyzingInfixSuggester extends Lookup implements Closeable { /** * edgegrams for searching short prefixes without Prefix Query that's controlled by {@linkplain * #minPrefixChars} */ protected static final String TEXTGRAMS_FIELD_NAME = "textgrams"; /** Field name used for the indexed text. */ protected static final String TEXT_FIELD_NAME = "text"; /** Field name used for the indexed text, as a StringField, for exact lookup. */ protected static final String EXACT_TEXT_FIELD_NAME = "exacttext"; /** * Field name used for the indexed context, as a StringField and a SortedSetDVField, for * filtering. */ protected static final String CONTEXTS_FIELD_NAME = "contexts"; /** Analyzer used at search time */ protected final Analyzer queryAnalyzer; /** Analyzer used at index time */ protected final Analyzer indexAnalyzer; private final Directory dir; final int minPrefixChars; private final boolean allTermsRequired; private final boolean highlight; private final boolean commitOnBuild; private final boolean closeIndexWriterOnBuild; /** * Used for ongoing NRT additions/updates. May be null depending on <code>closeIndexWriterOnBuild * </code> constructor arg */ protected IndexWriter writer; /** Used to manage concurrent access to writer */ protected final Object writerLock = new Object(); /** * {@link IndexSearcher} used for lookups. 
May be null if {@link Directory} did not exist on * instantiation and neither {@link #build}, {@link #add}, or {@link #update} have been called */ protected SearcherManager searcherMgr; /** Used to manage concurrent access to searcherMgr */ protected final ReadWriteLock searcherMgrLock = new ReentrantReadWriteLock(); private final Lock searcherMgrReadLock = searcherMgrLock.readLock(); private final Lock searcherMgrWriteLock = searcherMgrLock.writeLock(); /** Default minimum number of leading characters before PrefixQuery is used (4). */ public static final int DEFAULT_MIN_PREFIX_CHARS = 4; /** Default boolean clause option for multiple terms matching (all terms required). */ public static final boolean DEFAULT_ALL_TERMS_REQUIRED = true; /** Default higlighting option. */ public static final boolean DEFAULT_HIGHLIGHT = true; /** Default option to close the IndexWriter once the index has been built. */ protected static final boolean DEFAULT_CLOSE_INDEXWRITER_ON_BUILD = true; /** How we sort the postings and search results. */ private static final Sort SORT = new Sort(new SortField("weight", SortField.Type.LONG, true)); /** * Create a new instance, loading from a previously built AnalyzingInfixSuggester directory, if it * exists. This directory must be private to the infix suggester (i.e., not an external Lucene * index). Note that {@link #close} will also close the provided directory. */ public AnalyzingInfixSuggester(Directory dir, Analyzer analyzer) throws IOException { this( dir, analyzer, analyzer, DEFAULT_MIN_PREFIX_CHARS, false, DEFAULT_ALL_TERMS_REQUIRED, DEFAULT_HIGHLIGHT); } /** * Create a new instance, loading from a previously built AnalyzingInfixSuggester directory, if it * exists. This directory must be private to the infix suggester (i.e., not an external Lucene * index). Note that {@link #close} will also close the provided directory. * * @param minPrefixChars Minimum number of leading characters before PrefixQuery is used (default * 4). 
Prefixes shorter than this are indexed as character ngrams (increasing index size but * making lookups faster). * @param commitOnBuild Call commit after the index has finished building. This would persist the * suggester index to disk and future instances of this suggester can use this pre-built * dictionary. */ public AnalyzingInfixSuggester( Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars, boolean commitOnBuild) throws IOException { this( dir, indexAnalyzer, queryAnalyzer, minPrefixChars, commitOnBuild, DEFAULT_ALL_TERMS_REQUIRED, DEFAULT_HIGHLIGHT); } /** * Create a new instance, loading from a previously built AnalyzingInfixSuggester directory, if it * exists. This directory must be private to the infix suggester (i.e., not an external Lucene * index). Note that {@link #close} will also close the provided directory. * * @param minPrefixChars Minimum number of leading characters before PrefixQuery is used (default * 4). Prefixes shorter than this are indexed as character ngrams (increasing index size but * making lookups faster). * @param commitOnBuild Call commit after the index has finished building. This would persist the * suggester index to disk and future instances of this suggester can use this pre-built * dictionary. * @param allTermsRequired All terms in the suggest query must be matched. * @param highlight Highlight suggest query in suggestions. */ public AnalyzingInfixSuggester( Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars, boolean commitOnBuild, boolean allTermsRequired, boolean highlight) throws IOException { this( dir, indexAnalyzer, queryAnalyzer, minPrefixChars, commitOnBuild, allTermsRequired, highlight, DEFAULT_CLOSE_INDEXWRITER_ON_BUILD); } /** * Create a new instance, loading from a previously built AnalyzingInfixSuggester directory, if it * exists. This directory must be private to the infix suggester (i.e., not an external Lucene * index). 
Note that {@link #close} will also close the provided directory. * * @param minPrefixChars Minimum number of leading characters before PrefixQuery is used (default * 4). Prefixes shorter than this are indexed as character ngrams (increasing index size but * making lookups faster). * @param commitOnBuild Call commit after the index has finished building. This would persist the * suggester index to disk and future instances of this suggester can use this pre-built * dictionary. * @param allTermsRequired All terms in the suggest query must be matched. * @param highlight Highlight suggest query in suggestions. * @param closeIndexWriterOnBuild If true, the IndexWriter will be closed after the index has * finished building. */ public AnalyzingInfixSuggester( Directory dir, Analyzer indexAnalyzer, Analyzer queryAnalyzer, int minPrefixChars, boolean commitOnBuild, boolean allTermsRequired, boolean highlight, boolean closeIndexWriterOnBuild) throws IOException { if (minPrefixChars < 0) { throw new IllegalArgumentException("minPrefixChars must be >= 0; got: " + minPrefixChars); } this.queryAnalyzer = queryAnalyzer; this.indexAnalyzer = indexAnalyzer; this.dir = dir; this.minPrefixChars = minPrefixChars; this.commitOnBuild = commitOnBuild; this.allTermsRequired = allTermsRequired; this.highlight = highlight; this.closeIndexWriterOnBuild = closeIndexWriterOnBuild; if (DirectoryReader.indexExists(dir)) { // Already built; open it: searcherMgr = new SearcherManager(dir, null); } } private void setAndCloseOldSearcherManager(final SearcherManager newSearcherMgr) throws IOException { searcherMgrWriteLock.lock(); try { final SearcherManager oldSearcherMgr = searcherMgr; searcherMgr = newSearcherMgr; if (oldSearcherMgr != null) { oldSearcherMgr.close(); } } finally { searcherMgrWriteLock.unlock(); } } /** Override this to customize index settings, e.g. which codec to use. 
*/ protected IndexWriterConfig getIndexWriterConfig( Analyzer indexAnalyzer, IndexWriterConfig.OpenMode openMode) { IndexWriterConfig iwc = new IndexWriterConfig(indexAnalyzer); iwc.setOpenMode(openMode); // This way all merged segments will be sorted at // merge time, allow for per-segment early termination // when those segments are searched: iwc.setIndexSort(SORT); return iwc; } /** Subclass can override to choose a specific {@link Directory} implementation. */ protected Directory getDirectory(Path path) throws IOException { return FSDirectory.open(path); } @Override public void build(InputIterator iter) throws IOException { synchronized (writerLock) { if (writer != null) { writer.close(); writer = null; } try { // First pass: build a temporary normal Lucene index, // just indexing the suggestions as they iterate: writer = new IndexWriter( dir, getIndexWriterConfig(getGramAnalyzer(), IndexWriterConfig.OpenMode.CREATE)); // long t0 = System.nanoTime(); // TODO: use threads? BytesRef text; while ((text = iter.next()) != null) { BytesRef payload; if (iter.hasPayloads()) { payload = iter.payload(); } else { payload = null; } add(text, iter.contexts(), iter.weight(), payload); } // System.out.println("initial indexing time: " + ((System.nanoTime()-t0)/1000000) + " // msec"); if (commitOnBuild || closeIndexWriterOnBuild) { commit(); } setAndCloseOldSearcherManager(new SearcherManager(writer, null)); } catch (Throwable t) { if (writer != null) { writer.rollback(); writer = null; } throw t; } if (closeIndexWriterOnBuild) { writer.close(); writer = null; } } } /** * Commits all pending changes made to this suggester to disk. * * @see IndexWriter#commit */ public void commit() throws IOException { if (writer == null) { if (searcherMgr == null || closeIndexWriterOnBuild == false) { throw new IllegalStateException("Cannot commit on an closed writer. 
Add documents first"); } // else no-op: writer was committed and closed after the index was built, so commit is // unnecessary } else { writer.commit(); } } private Analyzer getGramAnalyzer() { return new AnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) { @Override protected Analyzer getWrappedAnalyzer(String fieldName) { return indexAnalyzer; } @Override protected TokenStreamComponents wrapComponents( String fieldName, TokenStreamComponents components) { assert !(fieldName.equals(TEXTGRAMS_FIELD_NAME) && minPrefixChars == 0) : "no need \"textgrams\" when minPrefixChars=" + minPrefixChars; if (fieldName.equals(TEXTGRAMS_FIELD_NAME) && minPrefixChars > 0) { // TODO: should use an EdgeNGramTokenFilterFactory here TokenFilter filter = new EdgeNGramTokenFilter(components.getTokenStream(), 1, minPrefixChars, false); return new TokenStreamComponents(components.getSource(), filter); } else { return components; } } }; } private void ensureOpen() throws IOException { synchronized (writerLock) { if (writer == null) { if (DirectoryReader.indexExists(dir)) { // Already built; open it: writer = new IndexWriter( dir, getIndexWriterConfig(getGramAnalyzer(), IndexWriterConfig.OpenMode.APPEND)); } else { writer = new IndexWriter( dir, getIndexWriterConfig(getGramAnalyzer(), IndexWriterConfig.OpenMode.CREATE)); } setAndCloseOldSearcherManager(new SearcherManager(writer, null)); } } } /** * Adds a new suggestion. Be sure to use {@link #update} instead if you want to replace a previous * suggestion. After adding or updating a batch of new suggestions, you must call {@link #refresh} * in the end in order to see the suggestions in {@link #lookup} */ public void add(BytesRef text, Set<BytesRef> contexts, long weight, BytesRef payload) throws IOException { ensureOpen(); writer.addDocument(buildDocument(text, contexts, weight, payload)); } /** * Updates a previous suggestion, matching the exact same text as before. 
Use this to change the * weight or payload of an already added suggestion. If you know this text is not already present * you can use {@link #add} instead. After adding or updating a batch of new suggestions, you must * call {@link #refresh} in the end in order to see the suggestions in {@link #lookup} */ public void update(BytesRef text, Set<BytesRef> contexts, long weight, BytesRef payload) throws IOException { ensureOpen(); writer.updateDocument( new Term(EXACT_TEXT_FIELD_NAME, text.utf8ToString()), buildDocument(text, contexts, weight, payload)); } private Document buildDocument( BytesRef text, Set<BytesRef> contexts, long weight, BytesRef payload) throws IOException { String textString = text.utf8ToString(); Document doc = new Document(); FieldType ft = getTextFieldType(); doc.add(new Field(TEXT_FIELD_NAME, textString, ft)); if (minPrefixChars > 0) { doc.add(new Field(TEXTGRAMS_FIELD_NAME, textString, ft)); } doc.add(new StringField(EXACT_TEXT_FIELD_NAME, textString, Field.Store.NO)); doc.add(new BinaryDocValuesField(TEXT_FIELD_NAME, text)); doc.add(new NumericDocValuesField("weight", weight)); if (payload != null) { doc.add(new BinaryDocValuesField("payloads", payload)); } if (contexts != null) { for (BytesRef context : contexts) { doc.add(new StringField(CONTEXTS_FIELD_NAME, context, Field.Store.NO)); doc.add(new SortedSetDocValuesField(CONTEXTS_FIELD_NAME, context)); } } return doc; } /** * Reopens the underlying searcher; it's best to "batch up" many additions/updates, and then call * refresh once in the end. */ public void refresh() throws IOException { if (searcherMgr == null) { throw new IllegalStateException("suggester was not built"); } if (writer != null) { searcherMgr.maybeRefreshBlocking(); } // else no-op: writer was committed and closed after the index was built // and before searchMgr was constructed, so refresh is unnecessary } /** * Subclass can override this method to change the field type of the text field e.g. 
to change the
   * index options
   */
  protected FieldType getTextFieldType() {
    FieldType ft = new FieldType(TextField.TYPE_NOT_STORED);
    // Doc-only postings with no norms: frequencies/positions/norms are not needed for suggesting,
    // which keeps the index small.
    ft.setIndexOptions(IndexOptions.DOCS);
    ft.setOmitNorms(true);
    return ft;
  }

  /**
   * {@link Lookup} entry point. Note that {@code onlyMorePopular} is not referenced by this
   * implementation; it delegates to the context-aware overload using the configured
   * {@code allTermsRequired} and {@code highlight} defaults.
   */
  @Override
  public List<LookupResult> lookup(
      CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num)
      throws IOException {
    return lookup(key, contexts, num, allTermsRequired, highlight);
  }

  /** Lookup, without any context. */
  public List<LookupResult> lookup(
      CharSequence key, int num, boolean allTermsRequired, boolean doHighlight) throws IOException {
    return lookup(key, (BooleanQuery) null, num, allTermsRequired, doHighlight);
  }

  /**
   * Lookup, with context but without booleans. Context booleans default to SHOULD, so each
   * suggestion must have at least one of the contexts.
   */
  public List<LookupResult> lookup(
      CharSequence key,
      Set<BytesRef> contexts,
      int num,
      boolean allTermsRequired,
      boolean doHighlight)
      throws IOException {
    return lookup(key, toQuery(contexts), num, allTermsRequired, doHighlight);
  }

  /**
   * This is called if the last token isn't ended (e.g. user did not type a space after it). Return
   * an appropriate Query clause to add to the BooleanQuery.
   */
  protected Query getLastTokenQuery(String token) throws IOException {
    if (token.length() < minPrefixChars) {
      // The leading ngram was directly indexed:
      return new TermQuery(new Term(TEXTGRAMS_FIELD_NAME, token));
    }
    return new PrefixQuery(new Term(TEXT_FIELD_NAME, token));
  }

  /**
   * Retrieve suggestions, specifying whether all terms must match ({@code allTermsRequired}) and
   * whether the hits should be highlighted ({@code doHighlight}).
   */
  public List<LookupResult> lookup(
      CharSequence key,
      Map<BytesRef, BooleanClause.Occur> contextInfo,
      int num,
      boolean allTermsRequired,
      boolean doHighlight)
      throws IOException {
    return lookup(key, toQuery(contextInfo), num, allTermsRequired, doHighlight);
  }

  /**
   * Converts a context-to-occur map into a {@link BooleanQuery} context filter. Returns
   * {@code null} when no contexts are given, meaning: do not filter by context.
   */
  private BooleanQuery toQuery(Map<BytesRef, BooleanClause.Occur> contextInfo) {
    if (contextInfo == null || contextInfo.isEmpty()) {
      return null;
    }
    BooleanQuery.Builder contextFilter = new BooleanQuery.Builder();
    for (Map.Entry<BytesRef, BooleanClause.Occur> entry : contextInfo.entrySet()) {
      addContextToQuery(contextFilter, entry.getKey(), entry.getValue());
    }
    return contextFilter.build();
  }

  /**
   * Converts a set of contexts into a {@link BooleanQuery} of SHOULD clauses, so a suggestion
   * matches if it has at least one of the contexts. Returns {@code null} when no contexts are
   * given.
   */
  private BooleanQuery toQuery(Set<BytesRef> contextInfo) {
    if (contextInfo == null || contextInfo.isEmpty()) {
      return null;
    }
    BooleanQuery.Builder contextFilter = new BooleanQuery.Builder();
    for (BytesRef context : contextInfo) {
      addContextToQuery(contextFilter, context, BooleanClause.Occur.SHOULD);
    }
    return contextFilter.build();
  }

  /**
   * This method is handy as we do not need access to internal fields such as CONTEXTS_FIELD_NAME in
   * order to build queries. However, here may not be its best location.
   *
   * @param query the {@link BooleanQuery.Builder} to add the context clause to
   * @param context the context
   * @param clause one of {@link Occur}
   */
  public void addContextToQuery(
      BooleanQuery.Builder query, BytesRef context, BooleanClause.Occur clause) {
    // NOTE: we "should" wrap this in
    // ConstantScoreQuery, or maybe send this as a
    // Filter instead to search.

    // TODO: if we had a BinaryTermField we could fix
    // this "must be valid utf8" limitation:
    query.add(new TermQuery(new Term(CONTEXTS_FIELD_NAME, context)), clause);
  }

  /**
   * This is an advanced method providing the capability to send down to the suggester any arbitrary
   * lucene query to be used to filter the result of the suggester
   *
   * @param key the keyword being looked for
   * @param contextQuery an arbitrary Lucene query to be used to filter the result of the suggester.
   * {@link #addContextToQuery} could be used to build this contextQuery.
   * @param num number of items to return
   * @param allTermsRequired all searched terms must match or not
   * @param doHighlight if true, the matching term will be highlighted in the search result
   * @return the result of the suggester
   * @throws IOException if there is an IO exception while reading data from the index
   */
  @Override
  public List<LookupResult> lookup(
      CharSequence key,
      BooleanQuery contextQuery,
      int num,
      boolean allTermsRequired,
      boolean doHighlight)
      throws IOException {

    if (searcherMgr == null) {
      throw new IllegalStateException("suggester was not built");
    }

    // MUST = every analyzed query token must match; SHOULD = any token may match:
    final BooleanClause.Occur occur;
    if (allTermsRequired) {
      occur = BooleanClause.Occur.MUST;
    } else {
      occur = BooleanClause.Occur.SHOULD;
    }

    BooleanQuery.Builder query;
    Set<String> matchedTokens;
    String prefixToken = null;

    // Analyze the user's query text into tokens; each fully-typed token becomes a TermQuery
    // clause, and the trailing (possibly still-being-typed) token is handled specially below.
    try (TokenStream ts = queryAnalyzer.tokenStream("", new StringReader(key.toString()))) {
      // long t0 = System.currentTimeMillis();
      ts.reset();
      final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      final OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
      String lastToken = null;
      query = new BooleanQuery.Builder();
      int maxEndOffset = -1;
      matchedTokens = new HashSet<>();
      while (ts.incrementToken()) {
        if (lastToken != null) {
          matchedTokens.add(lastToken);
          query.add(new TermQuery(new Term(TEXT_FIELD_NAME, lastToken)), occur);
        }
        lastToken = termAtt.toString();
        if (lastToken != null) {
          maxEndOffset = Math.max(maxEndOffset, offsetAtt.endOffset());
        }
      }
      ts.end();

      if (lastToken != null) {
        Query lastQuery;
        if (maxEndOffset == offsetAtt.endOffset()) {
          // Use PrefixQuery (or the ngram equivalent) when
          // there was no trailing discarded chars in the
          // string (e.g. whitespace), so that if query does
          // not end with a space we show prefix matches for
          // that token:
          lastQuery = getLastTokenQuery(lastToken);
          prefixToken = lastToken;
        } else {
          // Use TermQuery for an exact match if there were
          // trailing discarded chars (e.g. whitespace), so
          // that if query ends with a space we only show
          // exact matches for that term:
          matchedTokens.add(lastToken);
          lastQuery = new TermQuery(new Term(TEXT_FIELD_NAME, lastToken));
        }

        if (lastQuery != null) {
          query.add(lastQuery, occur);
        }
      }

      if (contextQuery != null) {
        boolean allMustNot = true;
        for (BooleanClause clause : contextQuery.clauses()) {
          if (clause.occur() != BooleanClause.Occur.MUST_NOT) {
            allMustNot = false;
            break;
          }
        }

        if (allMustNot) {
          // All are MUST_NOT: add the contextQuery to the main query instead (not as sub-query)
          for (BooleanClause clause : contextQuery.clauses()) {
            query.add(clause);
          }
        } else if (allTermsRequired == false) {
          // We must carefully upgrade the query clauses to MUST:
          BooleanQuery.Builder newQuery = new BooleanQuery.Builder();
          newQuery.add(query.build(), BooleanClause.Occur.MUST);
          newQuery.add(contextQuery, BooleanClause.Occur.MUST);
          query = newQuery;
        } else {
          // Add contextQuery as sub-query
          query.add(contextQuery, BooleanClause.Occur.MUST);
        }
      }
    }

    // TODO: we could allow blended sort here, combining
    // weight w/ score.  Now we ignore score and sort only
    // by weight:
    Query finalQuery = finishQuery(query, allTermsRequired);

    // System.out.println("finalQuery=" + finalQuery);

    // Sort by weight, descending:
    List<LookupResult> results = null;
    SearcherManager mgr;
    IndexSearcher searcher;
    // Snapshot searcherMgr under the read lock so acquire/release happen on the same instance
    // even if another thread swaps in a new SearcherManager concurrently:
    searcherMgrReadLock.lock();
    try {
      mgr = searcherMgr; // acquire & release on same SearcherManager, via local reference
      searcher = mgr.acquire();
    } finally {
      searcherMgrReadLock.unlock();
    }
    try {
      TopFieldCollectorManager c = new TopFieldCollectorManager(SORT, num, null, 1);

      // System.out.println("got searcher=" + searcher);
      TopFieldDocs hits = searcher.search(finalQuery, c);

      // Slower way if postings are not pre-sorted by weight:
      // hits = searcher.search(query, null, num, SORT);
      results = createResults(searcher, hits, num, key, doHighlight, matchedTokens, prefixToken);
    } finally {
      mgr.release(searcher);
    }

    // System.out.println((System.currentTimeMillis() - t0) + " ms for infix suggest");
    // System.out.println(results);

    return results;
  }

  /**
   * Create the results based on the search hits. Can be overridden by subclass to add particular
   * behavior (e.g. weight transformation). Note that there is no prefix token (the {@code
   * prefixToken} argument will be null) whenever the final token in the incoming request was in
   * fact finished (had trailing characters, such as white-space).
   *
   * @throws IOException If there are problems reading fields from the underlying Lucene index.
   */
  protected List<LookupResult> createResults(
      IndexSearcher searcher,
      TopFieldDocs hits,
      int num,
      CharSequence charSequence,
      boolean doHighlight,
      Set<String> matchedTokens,
      String prefixToken)
      throws IOException {

    // NOTE(review): the `num` and `charSequence` parameters are not referenced in this body; they
    // appear to exist for subclasses that override this method — confirm before removing.
    List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
    List<LookupResult> results = new ArrayList<>();
    for (int i = 0; i < hits.scoreDocs.length; i++) {
      FieldDoc fd = (FieldDoc) hits.scoreDocs[i];
      // Suggestion text is stored as binary doc values under the text field name:
      BinaryDocValues textDV =
          MultiDocValues.getBinaryValues(searcher.getIndexReader(), TEXT_FIELD_NAME);
      textDV.advance(fd.doc);
      BytesRef term = textDV.binaryValue();
      String text = term.utf8ToString();
      // The sort field value is the suggestion weight:
      long score = (Long) fd.fields[0];

      // This will just be null if app didn't pass payloads to build():
      // TODO: maybe just stored fields?  they compress...
      BinaryDocValues payloadsDV =
          MultiDocValues.getBinaryValues(searcher.getIndexReader(), "payloads");
      BytesRef payload;
      if (payloadsDV != null) {
        if (payloadsDV.advance(fd.doc) == fd.doc) {
          payload = BytesRef.deepCopyOf(payloadsDV.binaryValue());
        } else {
          payload = new BytesRef(BytesRef.EMPTY_BYTES);
        }
      } else {
        payload = null;
      }

      // Must look up sorted-set by segment:
      int segment = ReaderUtil.subIndex(fd.doc, leaves);
      SortedSetDocValues contextsDV =
          leaves.get(segment).reader().getSortedSetDocValues(CONTEXTS_FIELD_NAME);
      Set<BytesRef> contexts;
      if (contextsDV != null) {
        contexts = new HashSet<>();
        // Convert the global doc id to a segment-local doc id:
        int targetDocID = fd.doc - leaves.get(segment).docBase;
        if (contextsDV.advance(targetDocID) == targetDocID) {
          for (int j = 0; j < contextsDV.docValueCount(); j++) {
            BytesRef context = BytesRef.deepCopyOf(contextsDV.lookupOrd(contextsDV.nextOrd()));
            contexts.add(context);
          }
        }
      } else {
        contexts = null;
      }

      LookupResult result;
      if (doHighlight) {
        result =
            new LookupResult(
                text, highlight(text, matchedTokens, prefixToken), score, payload, contexts);
      } else {
        result = new LookupResult(text, score, payload, contexts);
      }

      results.add(result);
    }

    return results;
  }

  /** Subclass can override this to tweak the Query before
 searching. */
  protected Query finishQuery(BooleanQuery.Builder in, boolean allTermsRequired) {
    return in.build();
  }

  /**
   * Override this method to customize the Object representing a single highlighted suggestions; the
   * result is set on each {@link org.apache.lucene.search.suggest.Lookup.LookupResult#highlightKey}
   * member.
   */
  protected Object highlight(String text, Set<String> matchedTokens, String prefixToken)
      throws IOException {
    // Re-analyze the suggestion text and walk its tokens, delegating each span to
    // addNonMatch / addWholeMatch / addPrefixMatch so subclasses can customize markup.
    try (TokenStream ts = queryAnalyzer.tokenStream("text", new StringReader(text))) {
      CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
      OffsetAttribute offsetAtt = ts.addAttribute(OffsetAttribute.class);
      ts.reset();
      StringBuilder sb = new StringBuilder();
      // `upto` tracks how much of the original surface text has been emitted so far:
      int upto = 0;
      while (ts.incrementToken()) {
        String token = termAtt.toString();
        int startOffset = offsetAtt.startOffset();
        int endOffset = offsetAtt.endOffset();
        if (upto < startOffset) {
          addNonMatch(sb, text.substring(upto, startOffset));
          upto = startOffset;
        } else if (upto > startOffset) {
          // Overlapping token (already covered by a previous one); skip it:
          continue;
        }

        if (matchedTokens.contains(token)) {
          // Token matches.
          addWholeMatch(sb, text.substring(startOffset, endOffset), token);
          upto = endOffset;
        } else if (prefixToken != null && token.startsWith(prefixToken)) {
          addPrefixMatch(sb, text.substring(startOffset, endOffset), token, prefixToken);
          upto = endOffset;
        }
      }
      ts.end();
      // Emit any trailing text after the last token:
      int endOffset = offsetAtt.endOffset();
      if (upto < endOffset) {
        addNonMatch(sb, text.substring(upto));
      }
      return sb.toString();
    }
  }

  /**
   * Called while highlighting a single result, to append a non-matching chunk of text from the
   * suggestion to the provided fragments list.
   *
   * @param sb The {@code StringBuilder} to append to
   * @param text The text chunk to add
   */
  protected void addNonMatch(StringBuilder sb, String text) {
    sb.append(text);
  }

  /**
   * Called while highlighting a single result, to append the whole matched token to the provided
   * fragments list.
   *
   * @param sb The {@code StringBuilder} to append to
   * @param surface The surface form (original) text
   * @param analyzed The analyzed token corresponding to the surface form text
   */
  protected void addWholeMatch(StringBuilder sb, String surface, String analyzed) {
    sb.append("<b>");
    sb.append(surface);
    sb.append("</b>");
  }

  /**
   * Called while highlighting a single result, to append a matched prefix token, to the provided
   * fragments list.
   *
   * @param sb The {@code StringBuilder} to append to
   * @param surface The fragment of the surface form (indexed during {@link #build}), corresponding
   *     to this match
   * @param analyzed The analyzed token that matched
   * @param prefixToken The prefix of the token that matched
   */
  protected void addPrefixMatch(
      StringBuilder sb, String surface, String analyzed, String prefixToken) {
    // TODO: apps can try to invert their analysis logic
    // here, e.g. downcase the two before checking prefix:
    if (prefixToken.length() >= surface.length()) {
      // Prefix covers the whole surface form; highlight it all:
      addWholeMatch(sb, surface, analyzed);
      return;
    }
    sb.append("<b>");
    sb.append(surface, 0, prefixToken.length());
    sb.append("</b>");
    sb.append(surface.substring(prefixToken.length()));
  }

  /**
   * Persisting via {@link Lookup}'s store/load protocol is not supported (the suggester lives in
   * its own Lucene index); always returns false without writing anything.
   * NOTE(review): the parameter is named {@code in} but typed {@link DataOutput} — the name looks
   * swapped with {@link #load}; confirm before renaming.
   */
  @Override
  public boolean store(DataOutput in) throws IOException {
    return false;
  }

  /**
   * Not supported; always returns false without reading anything.
   * NOTE(review): the parameter is named {@code out} but typed {@link DataInput} — the name looks
   * swapped with {@link #store}; confirm before renaming.
   */
  @Override
  public boolean load(DataInput out) throws IOException {
    return false;
  }

  /**
   * Releases all resources: the searcher manager, then the index writer, then the directory.
   * Fields are nulled as they are closed so a second close is a no-op for them.
   */
  @Override
  public void close() throws IOException {
    if (searcherMgr != null) {
      searcherMgr.close();
      searcherMgr = null;
    }
    if (writer != null) {
      writer.close();
      writer = null;
    }
    if (dir != null) {
      dir.close();
    }
  }

  /** Heap usage is not tracked for this suggester; always returns 0. */
  @Override
  public long ramBytesUsed() {
    return 0L;
  }

  /** Returns the number of suggestions in the index, or 0 if the suggester was not built yet. */
  @Override
  public long getCount() throws IOException {
    if (searcherMgr == null) {
      return 0;
    }
    SearcherManager mgr;
    IndexSearcher searcher;
    // Snapshot searcherMgr under the read lock so acquire/release happen on the same instance:
    searcherMgrReadLock.lock();
    try {
      mgr = searcherMgr; // acquire & release on same SearcherManager, via local reference
      searcher = mgr.acquire();
    } finally {
      searcherMgrReadLock.unlock();
    }
    try {
      return searcher.getIndexReader().numDocs();
    } finally {
      mgr.release(searcher);
    }
  }
}
googleapis/google-cloud-java
34,553
java-securitycenter-settings/proto-google-cloud-securitycenter-settings-v1beta1/src/main/java/com/google/cloud/securitycenter/settings/v1beta1/SettingsServiceProto.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/settings/v1beta1/securitycenter_settings_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.settings.v1beta1; public final class SettingsServiceProto { private SettingsServiceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_GetServiceAccountRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_GetServiceAccountRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ServiceAccount_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ServiceAccount_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_GetSettingsRequest_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_GetSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ResetSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ResetSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsRequest_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_GetComponentSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_GetComponentSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateComponentSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateComponentSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ResetComponentSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ResetComponentSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveComponentSettingsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveComponentSettingsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsResponse_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\nRgoogle/cloud/securitycenter/settings/v" + "1beta1/securitycenter_settings_service.p" + "roto\022,google.cloud.securitycenter.settin" + "gs.v1beta1\032\034google/api/annotations.proto" + "\032\027google/api/client.proto\032\037google/api/fi" + "eld_behavior.proto\032\031google/api/resource." 
+ "proto\032Egoogle/cloud/securitycenter/setti" + "ngs/v1beta1/component_settings.proto\032;go" + "ogle/cloud/securitycenter/settings/v1bet" + "a1/detector.proto\032;google/cloud/security" + "center/settings/v1beta1/settings.proto\032\033" + "google/protobuf/empty.proto\032 google/prot" + "obuf/field_mask.proto\"^\n\030GetServiceAccou" + "ntRequest\022B\n\004name\030\001 \001(\tB4\340A\002\372A.\n,securit" + "ycenter.googleapis.com/ServiceAccount\"\227\001" + "\n\016ServiceAccount\022\014\n\004name\030\001 \001(\t\022\027\n\017servic" + "e_account\030\002 \001(\t:^\352A[\n,securitycenter.goo" + "gleapis.com/ServiceAccount\022+organization" + "s/{organization}/serviceAccount\"R\n\022GetSe" + "ttingsRequest\022<\n\004name\030\001 \001(\tB.\340A\002\372A(\n&sec" + "uritycenter.googleapis.com/Settings\"\227\001\n\025" + "UpdateSettingsRequest\022M\n\010settings\030\001 \001(\0132" + "6.google.cloud.securitycenter.settings.v" + "1beta1.SettingsB\003\340A\002\022/\n\013update_mask\030\002 \001(" + "\0132\032.google.protobuf.FieldMask\"b\n\024ResetSe" + "ttingsRequest\022<\n\004name\030\001 \001(\tB.\340A\002\372A(\n&sec" + "uritycenter.googleapis.com/Settings\022\014\n\004e" + "tag\030\002 \001(\t\"r\n\027BatchGetSettingsRequest\022H\n\006" + "parent\030\001 \001(\tB8\340A\002\372A2\n0cloudresourcemanag" + "er.googleapis.com/Organization\022\r\n\005names\030" + "\002 \003(\t\"d\n\030BatchGetSettingsResponse\022H\n\010set" + "tings\030\001 \003(\01326.google.cloud.securitycente" + "r.settings.v1beta1.Settings\"a\n!Calculate" + "EffectiveSettingsRequest\022<\n\004name\030\001 \001(\tB." 
+ "\340A\002\372A(\n&securitycenter.googleapis.com/Se" + "ttings\"\325\001\n&BatchCalculateEffectiveSettin" + "gsRequest\022H\n\006parent\030\001 \001(\tB8\340A\002\372A2\n0cloud" + "resourcemanager.googleapis.com/Organizat" + "ion\022a\n\010requests\030\002 \003(\0132O.google.cloud.sec" + "uritycenter.settings.v1beta1.CalculateEf" + "fectiveSettingsRequest\"s\n\'BatchCalculate" + "EffectiveSettingsResponse\022H\n\010settings\030\001 " + "\003(\01326.google.cloud.securitycenter.settin" + "gs.v1beta1.Settings\"d\n\033GetComponentSetti" + "ngsRequest\022E\n\004name\030\001 \001(\tB7\340A\002\372A1\n/securi" + "tycenter.googleapis.com/ComponentSetting" + "s\"\263\001\n\036UpdateComponentSettingsRequest\022`\n\022" + "component_settings\030\001 \001(\0132?.google.cloud." + "securitycenter.settings.v1beta1.Componen" + "tSettingsB\003\340A\002\022/\n\013update_mask\030\002 \001(\0132\032.go" + "ogle.protobuf.FieldMask\"t\n\035ResetComponen" + "tSettingsRequest\022E\n\004name\030\001 \001(\tB7\340A\002\372A1\n/" + "securitycenter.googleapis.com/ComponentS" + "ettings\022\014\n\004etag\030\002 \001(\t\"s\n*CalculateEffect" + "iveComponentSettingsRequest\022E\n\004name\030\001 \001(" + "\tB7\340A\002\372A1\n/securitycenter.googleapis.com" + "/ComponentSettings\"\227\001\n\024ListDetectorsRequ" + "est\022H\n\006parent\030\001 \001(\tB8\340A\002\372A2\n0cloudresour" + "cemanager.googleapis.com/Organization\022\016\n" + "\006filter\030\002 \001(\t\022\021\n\tpage_size\030\003 \001(\005\022\022\n\npage" + "_token\030\004 \001(\t\"{\n\025ListDetectorsResponse\022I\n" + "\tdetectors\030\001 \003(\01326.google.cloud.security" + "center.settings.v1beta1.Detector\022\027\n\017next" + "_page_token\030\002 \001(\t\"\210\001\n\025ListComponentsRequ" + "est\022H\n\006parent\030\001 \001(\tB8\340A\002\372A2\n0cloudresour" + "cemanager.googleapis.com/Organization\022\021\n" + "\tpage_size\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"E\n\026" + 
"ListComponentsResponse\022\022\n\ncomponents\030\001 \003" + "(\t\022\027\n\017next_page_token\030\002 \001(\t2\2043\n\035Security" + "CenterSettingsService\022\341\001\n\021GetServiceAcco" + "unt\022F.google.cloud.securitycenter.settin" + "gs.v1beta1.GetServiceAccountRequest\032<.go" + "ogle.cloud.securitycenter.settings.v1bet" + "a1.ServiceAccount\"F\332A\004name\202\323\344\223\0029\0227/setti" + "ngs/v1beta1/{name=organizations/*/servic" + "eAccount}\022\371\003\n\013GetSettings\022@.google.cloud" + ".securitycenter.settings.v1beta1.GetSett" + "ingsRequest\0326.google.cloud.securitycente" + "r.settings.v1beta1.Settings\"\357\002\332A\004name\202\323\344" + "\223\002\341\002\0221/settings/v1beta1/{name=organizati" + "ons/*/settings}Z-\022+/settings/v1beta1/{na" + "me=folders/*/settings}Z.\022,/settings/v1be" + "ta1/{name=projects/*/settings}ZE\022C/setti" + "ngs/v1beta1/{name=projects/*/locations/*" + "/clusters/*/settings}ZC\022A/settings/v1bet" + "a1/{name=projects/*/regions/*/clusters/*" + "/settings}ZA\022?/settings/v1beta1/{name=pr" + "ojects/*/zones/*/clusters/*/settings}\022\201\005" + "\n\016UpdateSettings\022C.google.cloud.security" + "center.settings.v1beta1.UpdateSettingsRe" + "quest\0326.google.cloud.securitycenter.sett" + "ings.v1beta1.Settings\"\361\003\332A\024settings,upda" + "te_mask\202\323\344\223\002\323\0032:/settings/v1beta1/{setti" + "ngs.name=organizations/*/settings}:\010sett" + "ingsZ@24/settings/v1beta1/{settings.name" + "=folders/*/settings}:\010settingsZA25/setti" + "ngs/v1beta1/{settings.name=projects/*/se" + "ttings}:\010settingsZX2L/settings/v1beta1/{" + "settings.name=projects/*/locations/*/clu" + "sters/*/settings}:\010settingsZV2J/settings" + "/v1beta1/{settings.name=projects/*/regio" + "ns/*/clusters/*/settings}:\010settingsZT2H/" + "settings/v1beta1/{settings.name=projects" + "/*/zones/*/clusters/*/settings}:\010setting" + "s\022\214\004\n\rResetSettings\022B.google.cloud.secur" + "itycenter.settings.v1beta1.ResetSettings" + 
"Request\032\026.google.protobuf.Empty\"\236\003\202\323\344\223\002\227" + "\003\"7/settings/v1beta1/{name=organizations" + "/*/settings}:reset:\001*Z6\"1/settings/v1bet" + "a1/{name=folders/*/settings}:reset:\001*Z7\"" + "2/settings/v1beta1/{name=projects/*/sett" + "ings}:reset:\001*ZN\"I/settings/v1beta1/{nam" + "e=projects/*/locations/*/clusters/*/sett" + "ings}:reset:\001*ZL\"G/settings/v1beta1/{nam" + "e=projects/*/regions/*/clusters/*/settin" + "gs}:reset:\001*ZJ\"E/settings/v1beta1/{name=" + "projects/*/zones/*/clusters/*/settings}:" + "reset:\001*\022\347\001\n\020BatchGetSettings\022E.google.c" + "loud.securitycenter.settings.v1beta1.Bat" + "chGetSettingsRequest\032F.google.cloud.secu" + "ritycenter.settings.v1beta1.BatchGetSett" + "ingsResponse\"D\202\323\344\223\002>\022</settings/v1beta1/" + "{parent=organizations/*}/settings:batchG" + "et\022\211\005\n\032CalculateEffectiveSettings\022O.goog" + "le.cloud.securitycenter.settings.v1beta1" + ".CalculateEffectiveSettingsRequest\0326.goo" + "gle.cloud.securitycenter.settings.v1beta" + "1.Settings\"\341\003\332A\004name\202\323\344\223\002\323\003\022D/settings/v" + "1beta1/{name=organizations/*/effectiveSe" + "ttings}:calculateZ@\022>/settings/v1beta1/{" + "name=folders/*/effectiveSettings}:calcul" + "ateZA\022?/settings/v1beta1/{name=projects/" + "*/effectiveSettings}:calculateZX\022V/setti" + "ngs/v1beta1/{name=projects/*/locations/*" + "/clusters/*/effectiveSettings}:calculate" + "ZV\022T/settings/v1beta1/{name=projects/*/r" + "egions/*/clusters/*/effectiveSettings}:c" + "alculateZT\022R/settings/v1beta1/{name=proj" + "ects/*/zones/*/clusters/*/effectiveSetti" + "ngs}:calculate\022\246\002\n\037BatchCalculateEffecti" + "veSettings\022T.google.cloud.securitycenter" + ".settings.v1beta1.BatchCalculateEffectiv" + "eSettingsRequest\032U.google.cloud.security" + "center.settings.v1beta1.BatchCalculateEf" + "fectiveSettingsResponse\"V\202\323\344\223\002P\"K/settin" + 
"gs/v1beta1/{parent=organizations/*}/effe" + "ctiveSettings:batchCalculate:\001*\022\342\004\n\024GetC" + "omponentSettings\022I.google.cloud.security" + "center.settings.v1beta1.GetComponentSett" + "ingsRequest\032?.google.cloud.securitycente" + "r.settings.v1beta1.ComponentSettings\"\275\003\332" + "A\004name\202\323\344\223\002\257\003\022>/settings/v1beta1/{name=o" + "rganizations/*/components/*/settings}Z:\022" + "8/settings/v1beta1/{name=folders/*/compo" + "nents/*/settings}Z;\0229/settings/v1beta1/{" + "name=projects/*/components/*/settings}ZR" + "\022P/settings/v1beta1/{name=projects/*/loc" + "ations/*/clusters/*/components/*/setting" + "s}ZP\022N/settings/v1beta1/{name=projects/*" + "/regions/*/clusters/*/components/*/setti" + "ngs}ZN\022L/settings/v1beta1/{name=projects" + "/*/zones/*/clusters/*/components/*/setti" + "ngs}\022\354\006\n\027UpdateComponentSettings\022L.googl" + "e.cloud.securitycenter.settings.v1beta1." + "UpdateComponentSettingsRequest\032?.google." + "cloud.securitycenter.settings.v1beta1.Co" + "mponentSettings\"\301\005\332A\036component_settings," + "update_mask\202\323\344\223\002\231\0052Q/settings/v1beta1/{c" + "omponent_settings.name=organizations/*/c" + "omponents/*/settings}:\022component_setting" + "sZa2K/settings/v1beta1/{component_settin" + "gs.name=folders/*/components/*/settings}" + ":\022component_settingsZb2L/settings/v1beta" + "1/{component_settings.name=projects/*/co" + "mponents/*/settings}:\022component_settings" + "Zy2c/settings/v1beta1/{component_setting" + "s.name=projects/*/locations/*/clusters/*" + "/components/*/settings}:\022component_setti" + "ngsZw2a/settings/v1beta1/{component_sett" + "ings.name=projects/*/regions/*/clusters/" + "*/components/*/settings}:\022component_sett" + "ingsZu2_/settings/v1beta1/{component_set" + "tings.name=projects/*/zones/*/clusters/*" + "/components/*/settings}:\022component_setti" + "ngs\022\354\004\n\026ResetComponentSettings\022K.google." 
+ "cloud.securitycenter.settings.v1beta1.Re" + "setComponentSettingsRequest\032\026.google.pro" + "tobuf.Empty\"\354\003\202\323\344\223\002\345\003\"D/settings/v1beta1" + "/{name=organizations/*/components/*/sett" + "ings}:reset:\001*ZC\">/settings/v1beta1/{nam" + "e=folders/*/components/*/settings}:reset" + ":\001*ZD\"?/settings/v1beta1/{name=projects/" + "*/components/*/settings}:reset:\001*Z[\"V/se" + "ttings/v1beta1/{name=projects/*/location" + "s/*/clusters/*/components/*/settings}:re" + "set:\001*ZY\"T/settings/v1beta1/{name=projec" + "ts/*/regions/*/clusters/*/components/*/s" + "ettings}:reset:\001*ZW\"R/settings/v1beta1/{" + "name=projects/*/zones/*/clusters/*/compo" + "nents/*/settings}:reset:\001*\022\362\005\n#Calculate" + "EffectiveComponentSettings\022X.google.clou" + "d.securitycenter.settings.v1beta1.Calcul" + "ateEffectiveComponentSettingsRequest\032?.g" + "oogle.cloud.securitycenter.settings.v1be" + "ta1.ComponentSettings\"\257\004\332A\004name\202\323\344\223\002\241\004\022Q" + "/settings/v1beta1/{name=organizations/*/" + "components/*/effectiveSettings}:calculat" + "eZM\022K/settings/v1beta1/{name=folders/*/c" + "omponents/*/effectiveSettings}:calculate" + "ZN\022L/settings/v1beta1/{name=projects/*/c" + "omponents/*/effectiveSettings}:calculate" + "Ze\022c/settings/v1beta1/{name=projects/*/l" + "ocations/*/clusters/*/components/*/effec" + "tiveSettings}:calculateZc\022a/settings/v1b" + "eta1/{name=projects/*/regions/*/clusters" + "/*/components/*/effectiveSettings}:calcu" + "lateZa\022_/settings/v1beta1/{name=projects" + "/*/zones/*/clusters/*/components/*/effec" + "tiveSettings}:calculate\022\337\001\n\rListDetector" + "s\022B.google.cloud.securitycenter.settings" + ".v1beta1.ListDetectorsRequest\032C.google.c" + "loud.securitycenter.settings.v1beta1.Lis" + "tDetectorsResponse\"E\332A\006parent\202\323\344\223\0026\0224/se" + "ttings/v1beta1/{parent=organizations/*}/" + "detectors\022\343\001\n\016ListComponents\022C.google.cl" + 
"oud.securitycenter.settings.v1beta1.List" + "ComponentsRequest\032D.google.cloud.securit" + "ycenter.settings.v1beta1.ListComponentsR" + "esponse\"F\332A\006parent\202\323\344\223\0027\0225/settings/v1be" + "ta1/{parent=organizations/*}/components\032" + "Q\312A\035securitycenter.googleapis.com\322A.http" + "s://www.googleapis.com/auth/cloud-platfo" + "rmB\254\002\n0com.google.cloud.securitycenter.s" + "ettings.v1beta1B\024SettingsServiceProtoP\001Z" + "Lcloud.google.com/go/securitycenter/sett" + "ings/apiv1beta1/settingspb;settingspb\370\001\001" + "\252\002,Google.Cloud.SecurityCenter.Settings." + "V1Beta1\312\002,Google\\Cloud\\SecurityCenter\\Se" + "ttings\\V1beta1\352\0020Google::Cloud::Security" + "Center::Settings::V1beta1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.securitycenter.settings.v1beta1.ComponentSettingsProto .getDescriptor(), com.google.cloud.securitycenter.settings.v1beta1.DetectorsProto.getDescriptor(), com.google.cloud.securitycenter.settings.v1beta1.SettingsProto.getDescriptor(), com.google.protobuf.EmptyProto.getDescriptor(), com.google.protobuf.FieldMaskProto.getDescriptor(), }); internal_static_google_cloud_securitycenter_settings_v1beta1_GetServiceAccountRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_securitycenter_settings_v1beta1_GetServiceAccountRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_GetServiceAccountRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ServiceAccount_descriptor = 
getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_securitycenter_settings_v1beta1_ServiceAccount_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ServiceAccount_descriptor, new java.lang.String[] { "Name", "ServiceAccount", }); internal_static_google_cloud_securitycenter_settings_v1beta1_GetSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_securitycenter_settings_v1beta1_GetSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_GetSettingsRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateSettingsRequest_descriptor, new java.lang.String[] { "Settings", "UpdateMask", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ResetSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_securitycenter_settings_v1beta1_ResetSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ResetSettingsRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor, new java.lang.String[] { "Parent", "Names", }); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsResponse_descriptor, new java.lang.String[] { "Settings", }); internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveSettingsRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsRequest_descriptor, new java.lang.String[] { "Parent", "Requests", }); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsResponse_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_securitycenter_settings_v1beta1_BatchCalculateEffectiveSettingsResponse_descriptor, new java.lang.String[] { "Settings", }); internal_static_google_cloud_securitycenter_settings_v1beta1_GetComponentSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_securitycenter_settings_v1beta1_GetComponentSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_GetComponentSettingsRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateComponentSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateComponentSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_UpdateComponentSettingsRequest_descriptor, new java.lang.String[] { "ComponentSettings", "UpdateMask", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ResetComponentSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_securitycenter_settings_v1beta1_ResetComponentSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ResetComponentSettingsRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveComponentSettingsRequest_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveComponentSettingsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_securitycenter_settings_v1beta1_CalculateEffectiveComponentSettingsRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsRequest_descriptor, new java.lang.String[] { "Parent", "Filter", "PageSize", "PageToken", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ListDetectorsResponse_descriptor, new java.lang.String[] { "Detectors", "NextPageToken", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsRequest_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsRequest_descriptor, new java.lang.String[] { "Parent", "PageSize", "PageToken", }); internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsResponse_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_securitycenter_settings_v1beta1_ListComponentsResponse_descriptor, new java.lang.String[] { "Components", 
"NextPageToken", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); registry.add(com.google.api.ResourceProto.resource); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.cloud.securitycenter.settings.v1beta1.ComponentSettingsProto.getDescriptor(); com.google.cloud.securitycenter.settings.v1beta1.DetectorsProto.getDescriptor(); com.google.cloud.securitycenter.settings.v1beta1.SettingsProto.getDescriptor(); com.google.protobuf.EmptyProto.getDescriptor(); com.google.protobuf.FieldMaskProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache/kafka
35,683
clients/src/main/java/org/apache/kafka/common/security/authenticator/SaslClientAuthenticator.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.common.security.authenticator; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.NetworkClient; import org.apache.kafka.common.KafkaException; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.errors.IllegalSaslStateException; import org.apache.kafka.common.errors.SaslAuthenticationException; import org.apache.kafka.common.errors.UnsupportedSaslMechanismException; import org.apache.kafka.common.internals.SecurityManagerCompatibility; import org.apache.kafka.common.message.ApiVersionsResponseData.ApiVersion; import org.apache.kafka.common.message.RequestHeaderData; import org.apache.kafka.common.message.SaslAuthenticateRequestData; import org.apache.kafka.common.message.SaslHandshakeRequestData; import org.apache.kafka.common.network.Authenticator; import org.apache.kafka.common.network.ByteBufferSend; import org.apache.kafka.common.network.NetworkReceive; import org.apache.kafka.common.network.ReauthenticationContext; import org.apache.kafka.common.network.Send; import org.apache.kafka.common.network.TransportLayer; import org.apache.kafka.common.protocol.ApiKeys; import 
org.apache.kafka.common.protocol.Errors; import org.apache.kafka.common.protocol.types.SchemaException; import org.apache.kafka.common.requests.AbstractResponse; import org.apache.kafka.common.requests.ApiVersionsRequest; import org.apache.kafka.common.requests.ApiVersionsResponse; import org.apache.kafka.common.requests.RequestHeader; import org.apache.kafka.common.requests.SaslAuthenticateRequest; import org.apache.kafka.common.requests.SaslAuthenticateResponse; import org.apache.kafka.common.requests.SaslHandshakeRequest; import org.apache.kafka.common.requests.SaslHandshakeResponse; import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler; import org.apache.kafka.common.security.auth.KafkaPrincipal; import org.apache.kafka.common.security.auth.KafkaPrincipalSerde; import org.apache.kafka.common.security.kerberos.KerberosError; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Utils; import org.slf4j.Logger; import java.io.IOException; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.channels.SelectionKey; import java.security.Principal; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.concurrent.CompletionException; import javax.security.auth.Subject; import javax.security.sasl.Sasl; import javax.security.sasl.SaslClient; import javax.security.sasl.SaslException; public class SaslClientAuthenticator implements Authenticator { /** * The internal state transitions for initial authentication of a channel are * declared in order, starting with {@link #SEND_APIVERSIONS_REQUEST} and ending * in either {@link #COMPLETE} or {@link #FAILED}. 
* <p> * Re-authentication of a channel starts with the state * {@link #REAUTH_PROCESS_ORIG_APIVERSIONS_RESPONSE} and then flows to * {@link #REAUTH_SEND_HANDSHAKE_REQUEST} followed by * {@link #REAUTH_RECEIVE_HANDSHAKE_OR_OTHER_RESPONSE} and then * {@link #REAUTH_INITIAL}; after that the flow joins the authentication flow * at the {@link #INTERMEDIATE} state and ends at either {@link #COMPLETE} or * {@link #FAILED}. */ public enum SaslState { SEND_APIVERSIONS_REQUEST, // Initial state for authentication: client sends ApiVersionsRequest in this state when authenticating RECEIVE_APIVERSIONS_RESPONSE, // Awaiting ApiVersionsResponse from server SEND_HANDSHAKE_REQUEST, // Received ApiVersionsResponse, send SaslHandshake request RECEIVE_HANDSHAKE_RESPONSE, // Awaiting SaslHandshake response from server when authenticating INITIAL, // Initial authentication state starting SASL token exchange for configured mechanism, send first token INTERMEDIATE, // Intermediate state during SASL token exchange, process challenges and send responses CLIENT_COMPLETE, // Sent response to last challenge. If using SaslAuthenticate, wait for authentication status from server, else COMPLETE COMPLETE, // Authentication sequence complete. If using SaslAuthenticate, this state implies successful authentication. 
FAILED, // Failed authentication due to an error at some stage REAUTH_PROCESS_ORIG_APIVERSIONS_RESPONSE, // Initial state for re-authentication: process ApiVersionsResponse from original authentication REAUTH_SEND_HANDSHAKE_REQUEST, // Processed original ApiVersionsResponse, send SaslHandshake request as part of re-authentication REAUTH_RECEIVE_HANDSHAKE_OR_OTHER_RESPONSE, // Awaiting SaslHandshake response from server when re-authenticating, and may receive other, in-flight responses sent prior to start of re-authentication as well REAUTH_INITIAL, // Initial re-authentication state starting SASL token exchange for configured mechanism, send first token } private static final short DISABLE_KAFKA_SASL_AUTHENTICATE_HEADER = -1; private static final Random RNG = new Random(); /** * the reserved range of correlation id for Sasl requests. * * Noted: there is a story about reserved range. The response of LIST_OFFSET is compatible to response of SASL_HANDSHAKE. * Hence, we could miss the schema error when using schema of SASL_HANDSHAKE to parse response of LIST_OFFSET. * For example: the IllegalStateException caused by mismatched correlation id is thrown if following steps happens. * 1) sent LIST_OFFSET * 2) sent SASL_HANDSHAKE * 3) receive response of LIST_OFFSET * 4) succeed to use schema of SASL_HANDSHAKE to parse response of LIST_OFFSET * 5) throw IllegalStateException due to mismatched correlation id * As a simple approach, we force Sasl requests to use a reserved correlation id which is separated from those * used in NetworkClient for Kafka requests. Hence, we can guarantee that every SASL request will throw * SchemaException due to correlation id mismatch during reauthentication */ public static final int MAX_RESERVED_CORRELATION_ID = Integer.MAX_VALUE; /** * We only expect one request in-flight a time during authentication so the small range is fine. 
*/ public static final int MIN_RESERVED_CORRELATION_ID = MAX_RESERVED_CORRELATION_ID - 7; /** * @return true if the correlation id is reserved for SASL request. otherwise, false */ public static boolean isReserved(int correlationId) { return correlationId >= MIN_RESERVED_CORRELATION_ID; } private final Subject subject; private final String servicePrincipal; private final String host; private final String node; private final String mechanism; private final TransportLayer transportLayer; private final SaslClient saslClient; private final Map<String, ?> configs; private final String clientPrincipalName; private final AuthenticateCallbackHandler callbackHandler; private final Time time; private final Logger log; private final ReauthInfo reauthInfo; // buffers used in `authenticate` private NetworkReceive netInBuffer; private Send netOutBuffer; // Current SASL state private SaslState saslState; // Next SASL state to be set when outgoing writes associated with the current SASL state complete private SaslState pendingSaslState; // Correlation ID for the next request private int correlationId; // Request header for which response from the server is pending private RequestHeader currentRequestHeader; // Version of SaslAuthenticate request/responses private short saslAuthenticateVersion; // Version of SaslHandshake request/responses private short saslHandshakeVersion; @SuppressWarnings("this-escape") public SaslClientAuthenticator(Map<String, ?> configs, AuthenticateCallbackHandler callbackHandler, String node, Subject subject, String servicePrincipal, String host, String mechanism, TransportLayer transportLayer, Time time, LogContext logContext) { this.node = node; this.subject = subject; this.callbackHandler = callbackHandler; this.host = host; this.servicePrincipal = servicePrincipal; this.mechanism = mechanism; this.correlationId = 0; this.transportLayer = transportLayer; this.configs = configs; this.saslAuthenticateVersion = DISABLE_KAFKA_SASL_AUTHENTICATE_HEADER; 
this.time = time; this.log = logContext.logger(getClass()); this.reauthInfo = new ReauthInfo(); try { setSaslState(SaslState.SEND_APIVERSIONS_REQUEST); // determine client principal from subject for Kerberos to use as authorization id for the SaslClient. // For other mechanisms, the authenticated principal (username for PLAIN and SCRAM) is used as // authorization id. Hence the principal is not specified for creating the SaslClient. if (mechanism.equals(SaslConfigs.GSSAPI_MECHANISM)) this.clientPrincipalName = firstPrincipal(subject); else this.clientPrincipalName = null; saslClient = createSaslClient(); } catch (Exception e) { throw new SaslAuthenticationException("Failed to configure SaslClientAuthenticator", e); } } // visible for testing SaslClient createSaslClient() { try { return SecurityManagerCompatibility.get().callAs(subject, () -> { String[] mechs = {mechanism}; log.debug("Creating SaslClient: client={};service={};serviceHostname={};mechs={}", clientPrincipalName, servicePrincipal, host, Arrays.toString(mechs)); SaslClient retvalSaslClient = Sasl.createSaslClient(mechs, clientPrincipalName, servicePrincipal, host, configs, callbackHandler); if (retvalSaslClient == null) { throw new SaslAuthenticationException("Failed to create SaslClient with mechanism " + mechanism); } return retvalSaslClient; }); } catch (CompletionException e) { throw new SaslAuthenticationException("Failed to create SaslClient with mechanism " + mechanism, e.getCause()); } } /** * Sends an empty message to the server to initiate the authentication process. It then evaluates server challenges * via `SaslClient.evaluateChallenge` and returns client responses until authentication succeeds or fails. * * The messages are sent and received as size delimited bytes that consists of a 4 byte network-ordered size N * followed by N bytes representing the opaque payload. 
*/ @SuppressWarnings("fallthrough") public void authenticate() throws IOException { if (netOutBuffer != null && !flushNetOutBufferAndUpdateInterestOps()) return; switch (saslState) { case SEND_APIVERSIONS_REQUEST: // Always use version 0 request since brokers treat requests with schema exceptions as GSSAPI tokens ApiVersionsRequest apiVersionsRequest = new ApiVersionsRequest.Builder().build((short) 0); send(apiVersionsRequest.toSend(nextRequestHeader(ApiKeys.API_VERSIONS, apiVersionsRequest.version()))); setSaslState(SaslState.RECEIVE_APIVERSIONS_RESPONSE); break; case RECEIVE_APIVERSIONS_RESPONSE: ApiVersionsResponse apiVersionsResponse = (ApiVersionsResponse) receiveKafkaResponse(); if (apiVersionsResponse == null) break; else { setSaslAuthenticateAndHandshakeVersions(apiVersionsResponse); reauthInfo.apiVersionsResponseReceivedFromBroker = apiVersionsResponse; setSaslState(SaslState.SEND_HANDSHAKE_REQUEST); // Fall through to send handshake request with the latest supported version } case SEND_HANDSHAKE_REQUEST: sendHandshakeRequest(saslHandshakeVersion); setSaslState(SaslState.RECEIVE_HANDSHAKE_RESPONSE); break; case RECEIVE_HANDSHAKE_RESPONSE: SaslHandshakeResponse handshakeResponse = (SaslHandshakeResponse) receiveKafkaResponse(); if (handshakeResponse == null) break; else { handleSaslHandshakeResponse(handshakeResponse); setSaslState(SaslState.INITIAL); // Fall through and start SASL authentication using the configured client mechanism } case INITIAL: sendInitialToken(); setSaslState(SaslState.INTERMEDIATE); break; case REAUTH_PROCESS_ORIG_APIVERSIONS_RESPONSE: setSaslAuthenticateAndHandshakeVersions(reauthInfo.apiVersionsResponseFromOriginalAuthentication); setSaslState(SaslState.REAUTH_SEND_HANDSHAKE_REQUEST); // Will set immediately // Fall through to send handshake request with the latest supported version case REAUTH_SEND_HANDSHAKE_REQUEST: sendHandshakeRequest(saslHandshakeVersion); setSaslState(SaslState.REAUTH_RECEIVE_HANDSHAKE_OR_OTHER_RESPONSE); 
break; case REAUTH_RECEIVE_HANDSHAKE_OR_OTHER_RESPONSE: handshakeResponse = (SaslHandshakeResponse) receiveKafkaResponse(); if (handshakeResponse == null) break; handleSaslHandshakeResponse(handshakeResponse); setSaslState(SaslState.REAUTH_INITIAL); // Will set immediately /* * Fall through and start SASL authentication using the configured client * mechanism. Note that we have to either fall through or add a loop to enter * the switch statement again. We will fall through to avoid adding the loop and * therefore minimize the changes to authentication-related code due to the * changes related to re-authentication. */ case REAUTH_INITIAL: sendInitialToken(); setSaslState(SaslState.INTERMEDIATE); break; case INTERMEDIATE: byte[] serverToken = receiveToken(); boolean noResponsesPending = serverToken != null && !sendSaslClientToken(serverToken, false); // For versions without SASL_AUTHENTICATE header, SASL exchange may be complete after a token is sent to server. // For versions with SASL_AUTHENTICATE header, server always sends a response to each SASL_AUTHENTICATE request. 
if (saslClient.isComplete()) { if (saslAuthenticateVersion == DISABLE_KAFKA_SASL_AUTHENTICATE_HEADER || noResponsesPending) setSaslState(SaslState.COMPLETE); else setSaslState(SaslState.CLIENT_COMPLETE); } break; case CLIENT_COMPLETE: byte[] serverResponse = receiveToken(); if (serverResponse != null) setSaslState(SaslState.COMPLETE); break; case COMPLETE: break; case FAILED: // Should never get here since exception would have been propagated earlier throw new IllegalStateException("SASL handshake has already failed"); } } private void sendHandshakeRequest(short version) throws IOException { SaslHandshakeRequest handshakeRequest = createSaslHandshakeRequest(version); send(handshakeRequest.toSend(nextRequestHeader(ApiKeys.SASL_HANDSHAKE, handshakeRequest.version()))); } private void sendInitialToken() throws IOException { sendSaslClientToken(new byte[0], true); } @Override public void reauthenticate(ReauthenticationContext reauthenticationContext) throws IOException { SaslClientAuthenticator previousSaslClientAuthenticator = (SaslClientAuthenticator) Objects .requireNonNull(reauthenticationContext).previousAuthenticator(); ApiVersionsResponse apiVersionsResponseFromOriginalAuthentication = previousSaslClientAuthenticator.reauthInfo .apiVersionsResponse(); previousSaslClientAuthenticator.close(); reauthInfo.reauthenticating(apiVersionsResponseFromOriginalAuthentication, reauthenticationContext.reauthenticationBeginNanos()); netInBuffer = reauthenticationContext.networkReceive(); setSaslState(SaslState.REAUTH_PROCESS_ORIG_APIVERSIONS_RESPONSE); // Will set immediately authenticate(); } @Override public Optional<NetworkReceive> pollResponseReceivedDuringReauthentication() { return reauthInfo.pollResponseReceivedDuringReauthentication(); } @Override public Long clientSessionReauthenticationTimeNanos() { return reauthInfo.clientSessionReauthenticationTimeNanos; } @Override public Long reauthenticationLatencyMs() { return reauthInfo.reauthenticationLatencyMs(); } // 
visible for testing int nextCorrelationId() { if (!isReserved(correlationId)) correlationId = MIN_RESERVED_CORRELATION_ID; return correlationId++; } private RequestHeader nextRequestHeader(ApiKeys apiKey, short version) { String clientId = (String) configs.get(CommonClientConfigs.CLIENT_ID_CONFIG); short requestApiKey = apiKey.id; currentRequestHeader = new RequestHeader( new RequestHeaderData(). setRequestApiKey(requestApiKey). setRequestApiVersion(version). setClientId(clientId). setCorrelationId(nextCorrelationId()), apiKey.requestHeaderVersion(version)); return currentRequestHeader; } // Visible to override for testing protected SaslHandshakeRequest createSaslHandshakeRequest(short version) { return new SaslHandshakeRequest.Builder( new SaslHandshakeRequestData().setMechanism(mechanism)).build(version); } // Visible to override for testing protected void setSaslAuthenticateAndHandshakeVersions(ApiVersionsResponse apiVersionsResponse) { ApiVersion authenticateVersion = apiVersionsResponse.apiVersion(ApiKeys.SASL_AUTHENTICATE.id); if (authenticateVersion != null) { this.saslAuthenticateVersion = (short) Math.min(authenticateVersion.maxVersion(), ApiKeys.SASL_AUTHENTICATE.latestVersion()); } ApiVersion handshakeVersion = apiVersionsResponse.apiVersion(ApiKeys.SASL_HANDSHAKE.id); if (handshakeVersion != null) { this.saslHandshakeVersion = (short) Math.min(handshakeVersion.maxVersion(), ApiKeys.SASL_HANDSHAKE.latestVersion()); } } private void setSaslState(SaslState saslState) { if (netOutBuffer != null && !netOutBuffer.completed()) pendingSaslState = saslState; else { this.pendingSaslState = null; this.saslState = saslState; log.debug("Set SASL client state to {}", saslState); if (saslState == SaslState.COMPLETE) { reauthInfo.setAuthenticationEndAndSessionReauthenticationTimes(time.nanoseconds()); if (!reauthInfo.reauthenticating()) transportLayer.removeInterestOps(SelectionKey.OP_WRITE); else /* * Re-authentication is triggered by a write, so we have to make sure 
that
 * pending write is actually sent. */
transportLayer.addInterestOps(SelectionKey.OP_WRITE);
}
}
}

/**
 * Sends a SASL client token to server if required. This may be an initial token to start
 * SASL token exchange or response to a challenge from the server.
 * @return true if a token was sent to the server
 */
private boolean sendSaslClientToken(byte[] serverToken, boolean isInitial) throws IOException {
    if (!saslClient.isComplete()) {
        byte[] saslToken = createSaslToken(serverToken, isInitial);
        if (saslToken != null) {
            ByteBuffer tokenBuf = ByteBuffer.wrap(saslToken);
            Send send;
            if (saslAuthenticateVersion == DISABLE_KAFKA_SASL_AUTHENTICATE_HEADER) {
                // Legacy wire format: raw SASL token, length-prefixed, with no Kafka request header.
                send = ByteBufferSend.sizePrefixed(tokenBuf);
            } else {
                // Modern wire format: wrap the SASL token in a SaslAuthenticate request.
                SaslAuthenticateRequestData data = new SaslAuthenticateRequestData()
                        .setAuthBytes(tokenBuf.array());
                SaslAuthenticateRequest request = new SaslAuthenticateRequest.Builder(data).build(saslAuthenticateVersion);
                send = request.toSend(nextRequestHeader(ApiKeys.SASL_AUTHENTICATE, saslAuthenticateVersion));
            }
            send(send);
            return true;
        }
    }
    return false;
}

/**
 * Queues {@code send} as the pending outbound buffer and attempts to flush it.
 * Marks the authenticator FAILED and rethrows if the write raises an IOException.
 */
private void send(Send send) throws IOException {
    try {
        netOutBuffer = send;
        flushNetOutBufferAndUpdateInterestOps();
    } catch (IOException e) {
        setSaslState(SaslState.FAILED);
        throw e;
    }
}

/**
 * Flushes the pending outbound buffer and adjusts OP_WRITE interest accordingly.
 * On a complete flush, any pending SASL state transition is applied.
 * @return true if the buffer was flushed completely
 */
private boolean flushNetOutBufferAndUpdateInterestOps() throws IOException {
    boolean flushedCompletely = flushNetOutBuffer();
    if (flushedCompletely) {
        transportLayer.removeInterestOps(SelectionKey.OP_WRITE);
        // A state change may have been deferred until the write completed; apply it now.
        if (pendingSaslState != null)
            setSaslState(pendingSaslState);
    } else
        transportLayer.addInterestOps(SelectionKey.OP_WRITE);
    return flushedCompletely;
}

/**
 * Reads bytes from the transport into {@code netInBuffer}.
 * @return the complete packet bytes once fully received, or null if more reads are needed
 */
private byte[] receiveResponseOrToken() throws IOException {
    if (netInBuffer == null) netInBuffer = new NetworkReceive(node);
    netInBuffer.readFrom(transportLayer);
    byte[] serverPacket = null;
    if (netInBuffer.complete()) {
        netInBuffer.payload().rewind();
        serverPacket = new byte[netInBuffer.payload().remaining()];
        netInBuffer.payload().get(serverPacket, 0, serverPacket.length);
        netInBuffer = null; // reset the networkReceive as we read all the data.
    }
    return serverPacket;
}

/**
 * Returns the authenticated principal as a USER-type KafkaPrincipal built from
 * {@code clientPrincipalName} (established elsewhere during authentication).
 */
public KafkaPrincipal principal() {
    return new KafkaPrincipal(KafkaPrincipal.USER_TYPE, clientPrincipalName);
}

/** Client side has no principal serde; always empty. */
@Override
public Optional<KafkaPrincipalSerde> principalSerde() {
    return Optional.empty();
}

/** @return true once the SASL exchange has reached the COMPLETE state */
public boolean complete() {
    return saslState == SaslState.COMPLETE;
}

/** Disposes the underlying SaslClient, if one was created. */
public void close() throws IOException {
    if (saslClient != null)
        saslClient.dispose();
}

/**
 * Receives a SASL token from the broker, unwrapping it from a SaslAuthenticate
 * response when the Kafka header is in use. A non-NONE error code fails the
 * authenticator and is rethrown as the corresponding exception; a positive
 * session lifetime from the broker is recorded for re-authentication scheduling.
 * @return the server token bytes, or null if a complete response has not arrived yet
 */
private byte[] receiveToken() throws IOException {
    if (saslAuthenticateVersion == DISABLE_KAFKA_SASL_AUTHENTICATE_HEADER) {
        return receiveResponseOrToken();
    } else {
        SaslAuthenticateResponse response = (SaslAuthenticateResponse) receiveKafkaResponse();
        if (response != null) {
            Errors error = response.error();
            if (error != Errors.NONE) {
                setSaslState(SaslState.FAILED);
                String errMsg = response.errorMessage();
                throw errMsg == null ? error.exception() : error.exception(errMsg);
            }
            long sessionLifetimeMs = response.sessionLifetimeMs();
            if (sessionLifetimeMs > 0L)
                reauthInfo.positiveSessionLifetimeMs = sessionLifetimeMs;
            return Utils.copyArray(response.saslAuthBytes());
        } else
            return null;
    }
}

/**
 * Evaluates the server's SASL token (running as {@code subject} so Kerberos
 * credentials are visible) and produces the client's next token.
 * Transient Kerberos failures are rethrown as retriable SaslException; all
 * other failures as fatal SaslAuthenticationException.
 * @throws SaslException if token evaluation fails but the client may retry
 */
private byte[] createSaslToken(final byte[] saslToken, boolean isInitial) throws SaslException {
    if (saslToken == null)
        throw new IllegalSaslStateException("Error authenticating with the Kafka Broker: received a `null` saslToken.");
    try {
        // Some mechanisms send no initial response; the first server token passes through unchanged.
        if (isInitial && !saslClient.hasInitialResponse())
            return saslToken;
        else
            return SecurityManagerCompatibility.get().callAs(subject, () -> saslClient.evaluateChallenge(saslToken));
    } catch (CompletionException e) {
        String error = "An error: (" + e + ") occurred when evaluating SASL token received from the Kafka Broker.";
        KerberosError kerberosError = KerberosError.fromException(e);
        // Try to provide hints to users about what went wrong so they can fix their configuration.
        if (kerberosError == KerberosError.SERVER_NOT_FOUND) {
            error += " This may be caused by Java's being unable to resolve the Kafka Broker's" +
                " hostname correctly. You may want to try to adding" +
                " '-Dsun.net.spi.nameservice.provider.1=dns,sun' to your client's JVMFLAGS environment." +
                " Users must configure FQDN of kafka brokers when authenticating using SASL and" +
                " `socketChannel.socket().getInetAddress().getHostName()` must match the hostname in `principal/hostname@realm`";
        }
        //Unwrap the SaslException
        Throwable cause = e.getCause();
        // Treat transient Kerberos errors as non-fatal SaslExceptions that are processed as I/O exceptions
        // and all other failures as fatal SaslAuthenticationException.
        if ((kerberosError != null && kerberosError.retriable()) || (kerberosError == null && KerberosError.isRetriableClientGssException(e))) {
            error += " Kafka Client will retry.";
            throw new SaslException(error, cause);
        } else {
            error += " Kafka Client will go to AUTHENTICATION_FAILED state.";
            throw new SaslAuthenticationException(error, cause);
        }
    }
}

/**
 * Writes any remaining bytes of the pending outbound buffer to the transport.
 * @return true if the buffer has been written completely
 */
private boolean flushNetOutBuffer() throws IOException {
    if (!netOutBuffer.completed()) {
        netOutBuffer.writeTo(transportLayer);
    }
    return netOutBuffer.completed();
}

/**
 * Receives and parses a Kafka response for the current in-flight request header.
 * During re-authentication, a response that fails to parse against the current
 * header is assumed to belong to an earlier (pre-re-authentication) request and
 * is saved for later processing instead of being treated as an error.
 * @return the parsed response, or null if a complete response has not arrived yet
 */
private AbstractResponse receiveKafkaResponse() throws IOException {
    if (netInBuffer == null)
        netInBuffer = new NetworkReceive(node);
    // Keep a reference: receiveResponseOrToken() nulls out netInBuffer once complete.
    NetworkReceive receive = netInBuffer;
    try {
        byte[] responseBytes = receiveResponseOrToken();
        if (responseBytes == null)
            return null;
        else {
            AbstractResponse response = NetworkClient.parseResponse(ByteBuffer.wrap(responseBytes), currentRequestHeader);
            currentRequestHeader = null;
            return response;
        }
    } catch (BufferUnderflowException | SchemaException | IllegalArgumentException e) {
        /*
         * Account for the fact that during re-authentication there may be responses
         * arriving for requests that were sent in the past.
         */
        if (reauthInfo.reauthenticating()) {
            /*
             * It didn't match the current request header, so it must be unrelated to
             * re-authentication. Save it so it can be processed later.
             */
            receive.payload().rewind();
            reauthInfo.pendingAuthenticatedReceives.add(receive);
            return null;
        }
        log.debug("Invalid SASL mechanism response, server may be expecting only GSSAPI tokens");
        setSaslState(SaslState.FAILED);
        throw new IllegalSaslStateException("Invalid SASL mechanism response, server may be expecting a different protocol", e);
    }
}

/**
 * Handles the broker's SaslHandshake response: any non-NONE error fails the
 * authenticator and is rethrown as a mechanism-specific exception.
 */
private void handleSaslHandshakeResponse(SaslHandshakeResponse response) {
    Errors error = response.error();
    if (error != Errors.NONE)
        setSaslState(SaslState.FAILED);
    switch (error) {
        case NONE:
            break;
        case UNSUPPORTED_SASL_MECHANISM:
            throw new UnsupportedSaslMechanismException(String.format("Client SASL mechanism '%s' not enabled in the server, enabled mechanisms are %s",
                mechanism, response.enabledMechanisms()));
        case ILLEGAL_SASL_STATE:
            throw new IllegalSaslStateException(String.format("Unexpected handshake request with client mechanism %s, enabled mechanisms are %s",
                mechanism, response.enabledMechanisms()));
        default:
            throw new IllegalSaslStateException(String.format("Unknown error code %s, client mechanism is %s, enabled mechanisms are %s",
                response.error(), mechanism, response.enabledMechanisms()));
    }
}

/**
 * Returns the first Principal from Subject.
 * @throws KafkaException if there are no Principals in the Subject.
 *         During Kerberos re-login, principal is reset on Subject. An exception is
 *         thrown so that the connection is retried after any configured backoff.
 */
public static String firstPrincipal(Subject subject) {
    Set<Principal> principals = subject.getPrincipals();
    // The principal set is a synchronized view; iteration must hold its monitor.
    synchronized (principals) {
        Iterator<Principal> iterator = principals.iterator();
        if (iterator.hasNext())
            return iterator.next().getName();
        else
            throw new KafkaException("Principal could not be determined from Subject, this may be a transient failure due to Kerberos re-login");
    }
}

/**
 * Information related to re-authentication
 */
private class ReauthInfo {
    // ApiVersions response saved from the original authentication; non-null iff re-authenticating.
    public ApiVersionsResponse apiVersionsResponseFromOriginalAuthentication;
    // Nanosecond timestamp at which re-authentication began.
    public long reauthenticationBeginNanos;
    // Responses to pre-re-authentication requests that arrived mid-re-authentication.
    public List<NetworkReceive> pendingAuthenticatedReceives = new ArrayList<>();
    public ApiVersionsResponse apiVersionsResponseReceivedFromBroker;
    // Broker-advertised session lifetime, if positive; null otherwise.
    public Long positiveSessionLifetimeMs;
    public long authenticationEndNanos;
    // When the client should start re-authenticating; null if the session never expires.
    public Long clientSessionReauthenticationTimeNanos;

    /** Marks this channel as re-authenticating, recording the prior ApiVersions response and start time. */
    public void reauthenticating(ApiVersionsResponse apiVersionsResponseFromOriginalAuthentication,
                                 long reauthenticationBeginNanos) {
        this.apiVersionsResponseFromOriginalAuthentication = Objects
                .requireNonNull(apiVersionsResponseFromOriginalAuthentication);
        this.reauthenticationBeginNanos = reauthenticationBeginNanos;
    }

    public boolean reauthenticating() {
        return apiVersionsResponseFromOriginalAuthentication != null;
    }

    /** The ApiVersions response in effect: the original one during re-authentication, else the broker's. */
    public ApiVersionsResponse apiVersionsResponse() {
        return reauthenticating() ? apiVersionsResponseFromOriginalAuthentication
                : apiVersionsResponseReceivedFromBroker;
    }

    /**
     * Return the (always non-null but possibly empty) NetworkReceive response that
     * arrived during re-authentication that is unrelated to re-authentication, if
     * any. This corresponds to a request sent prior to the beginning of
     * re-authentication; the request was made when the channel was successfully
     * authenticated, and the response arrived during the re-authentication
     * process.
     *
     * @return the (always non-null but possibly empty) NetworkReceive response
     *         that arrived during re-authentication that is unrelated to
     *         re-authentication, if any
     */
    public Optional<NetworkReceive> pollResponseReceivedDuringReauthentication() {
        if (pendingAuthenticatedReceives.isEmpty())
            return Optional.empty();
        return Optional.of(pendingAuthenticatedReceives.remove(0));
    }

    /**
     * Records the authentication end time and, if the broker advertised a session
     * lifetime, schedules client-side re-authentication at a jittered fraction of
     * that lifetime so many channels do not re-authenticate simultaneously.
     */
    public void setAuthenticationEndAndSessionReauthenticationTimes(long nowNanos) {
        authenticationEndNanos = nowNanos;
        long sessionLifetimeMsToUse;
        if (positiveSessionLifetimeMs != null) {
            // pick a random percentage between 85% and 95% for session re-authentication
            double pctWindowFactorToTakeNetworkLatencyAndClockDriftIntoAccount = 0.85;
            double pctWindowJitterToAvoidReauthenticationStormAcrossManyChannelsSimultaneously = 0.10;
            double pctToUse = pctWindowFactorToTakeNetworkLatencyAndClockDriftIntoAccount + RNG.nextDouble()
                * pctWindowJitterToAvoidReauthenticationStormAcrossManyChannelsSimultaneously;
            sessionLifetimeMsToUse = (long) (positiveSessionLifetimeMs * pctToUse);
            // Math.addExact guards against overflow when the lifetime is very large.
            clientSessionReauthenticationTimeNanos = Math.addExact(authenticationEndNanos,
                Utils.msToNs(sessionLifetimeMsToUse));
            log.debug(
                "Finished {} with session expiration in {} ms and session re-authentication on or after {} ms",
                authenticationOrReauthenticationText(), positiveSessionLifetimeMs, sessionLifetimeMsToUse);
        } else
            log.debug("Finished {} with no session expiration and no session re-authentication",
                authenticationOrReauthenticationText());
    }

    /** @return the re-authentication latency in ms, or null if not re-authenticating */
    public Long reauthenticationLatencyMs() {
        return reauthenticating()
            ? Math.round((authenticationEndNanos - reauthenticationBeginNanos) / 1000.0 / 1000.0)
            : null;
    }

    private String authenticationOrReauthenticationText() {
        return reauthenticating() ? "re-authentication" : "authentication";
    }
}
}
googleapis/google-cloud-java
35,240
java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/ListNoteOccurrencesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1beta1; /** * * * <pre> * Response for listing occurrences for a note. * </pre> * * Protobuf type {@code grafeas.v1beta1.ListNoteOccurrencesResponse} */ public final class ListNoteOccurrencesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1beta1.ListNoteOccurrencesResponse) ListNoteOccurrencesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListNoteOccurrencesResponse.newBuilder() to construct. 
private ListNoteOccurrencesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListNoteOccurrencesResponse() { occurrences_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListNoteOccurrencesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.ListNoteOccurrencesResponse.class, io.grafeas.v1beta1.ListNoteOccurrencesResponse.Builder.class); } public static final int OCCURRENCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_; /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ @java.lang.Override public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() { return occurrences_; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ @java.lang.Override public java.util.List<? extends io.grafeas.v1beta1.OccurrenceOrBuilder> getOccurrencesOrBuilderList() { return occurrences_; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ @java.lang.Override public int getOccurrencesCount() { return occurrences_.size(); } /** * * * <pre> * The occurrences attached to the specified note. 
* </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ @java.lang.Override public io.grafeas.v1beta1.Occurrence getOccurrences(int index) { return occurrences_.get(index); } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ @java.lang.Override public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) { return occurrences_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < occurrences_.size(); i++) { output.writeMessage(1, occurrences_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < occurrences_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, occurrences_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1beta1.ListNoteOccurrencesResponse)) { return super.equals(obj); } io.grafeas.v1beta1.ListNoteOccurrencesResponse other = (io.grafeas.v1beta1.ListNoteOccurrencesResponse) obj; if (!getOccurrencesList().equals(other.getOccurrencesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getOccurrencesCount() > 0) { hash = (37 * hash) + OCCURRENCES_FIELD_NUMBER; hash = (53 * hash) + getOccurrencesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1beta1.ListNoteOccurrencesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for listing occurrences for a note. * </pre> * * Protobuf type {@code grafeas.v1beta1.ListNoteOccurrencesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.ListNoteOccurrencesResponse) io.grafeas.v1beta1.ListNoteOccurrencesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.ListNoteOccurrencesResponse.class, io.grafeas.v1beta1.ListNoteOccurrencesResponse.Builder.class); } // Construct using io.grafeas.v1beta1.ListNoteOccurrencesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (occurrencesBuilder_ == null) { occurrences_ = java.util.Collections.emptyList(); } else { occurrences_ = null; occurrencesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor; } @java.lang.Override public io.grafeas.v1beta1.ListNoteOccurrencesResponse 
getDefaultInstanceForType() { return io.grafeas.v1beta1.ListNoteOccurrencesResponse.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1beta1.ListNoteOccurrencesResponse build() { io.grafeas.v1beta1.ListNoteOccurrencesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public io.grafeas.v1beta1.ListNoteOccurrencesResponse buildPartial() { io.grafeas.v1beta1.ListNoteOccurrencesResponse result = new io.grafeas.v1beta1.ListNoteOccurrencesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(io.grafeas.v1beta1.ListNoteOccurrencesResponse result) { if (occurrencesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { occurrences_ = java.util.Collections.unmodifiableList(occurrences_); bitField0_ = (bitField0_ & ~0x00000001); } result.occurrences_ = occurrences_; } else { result.occurrences_ = occurrencesBuilder_.build(); } } private void buildPartial0(io.grafeas.v1beta1.ListNoteOccurrencesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override 
public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1beta1.ListNoteOccurrencesResponse) { return mergeFrom((io.grafeas.v1beta1.ListNoteOccurrencesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1beta1.ListNoteOccurrencesResponse other) { if (other == io.grafeas.v1beta1.ListNoteOccurrencesResponse.getDefaultInstance()) return this; if (occurrencesBuilder_ == null) { if (!other.occurrences_.isEmpty()) { if (occurrences_.isEmpty()) { occurrences_ = other.occurrences_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureOccurrencesIsMutable(); occurrences_.addAll(other.occurrences_); } onChanged(); } } else { if (!other.occurrences_.isEmpty()) { if (occurrencesBuilder_.isEmpty()) { occurrencesBuilder_.dispose(); occurrencesBuilder_ = null; occurrences_ = other.occurrences_; bitField0_ = (bitField0_ & ~0x00000001); occurrencesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getOccurrencesFieldBuilder() : null; } else { occurrencesBuilder_.addAllMessages(other.occurrences_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { io.grafeas.v1beta1.Occurrence m = input.readMessage(io.grafeas.v1beta1.Occurrence.parser(), extensionRegistry); if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); occurrences_.add(m); } else { occurrencesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<io.grafeas.v1beta1.Occurrence> occurrences_ = java.util.Collections.emptyList(); private void ensureOccurrencesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { occurrences_ = new java.util.ArrayList<io.grafeas.v1beta1.Occurrence>(occurrences_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.Occurrence, io.grafeas.v1beta1.Occurrence.Builder, io.grafeas.v1beta1.OccurrenceOrBuilder> occurrencesBuilder_; /** * * * <pre> * The occurrences 
attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public java.util.List<io.grafeas.v1beta1.Occurrence> getOccurrencesList() { if (occurrencesBuilder_ == null) { return java.util.Collections.unmodifiableList(occurrences_); } else { return occurrencesBuilder_.getMessageList(); } } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public int getOccurrencesCount() { if (occurrencesBuilder_ == null) { return occurrences_.size(); } else { return occurrencesBuilder_.getCount(); } } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public io.grafeas.v1beta1.Occurrence getOccurrences(int index) { if (occurrencesBuilder_ == null) { return occurrences_.get(index); } else { return occurrencesBuilder_.getMessage(index); } } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder setOccurrences(int index, io.grafeas.v1beta1.Occurrence value) { if (occurrencesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOccurrencesIsMutable(); occurrences_.set(index, value); onChanged(); } else { occurrencesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder setOccurrences( int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) { if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); occurrences_.set(index, builderForValue.build()); onChanged(); } else { occurrencesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The occurrences attached to the specified note. 
* </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder addOccurrences(io.grafeas.v1beta1.Occurrence value) { if (occurrencesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOccurrencesIsMutable(); occurrences_.add(value); onChanged(); } else { occurrencesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder addOccurrences(int index, io.grafeas.v1beta1.Occurrence value) { if (occurrencesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureOccurrencesIsMutable(); occurrences_.add(index, value); onChanged(); } else { occurrencesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder addOccurrences(io.grafeas.v1beta1.Occurrence.Builder builderForValue) { if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); occurrences_.add(builderForValue.build()); onChanged(); } else { occurrencesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder addOccurrences( int index, io.grafeas.v1beta1.Occurrence.Builder builderForValue) { if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); occurrences_.add(index, builderForValue.build()); onChanged(); } else { occurrencesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder addAllOccurrences( java.lang.Iterable<? 
extends io.grafeas.v1beta1.Occurrence> values) { if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, occurrences_); onChanged(); } else { occurrencesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder clearOccurrences() { if (occurrencesBuilder_ == null) { occurrences_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { occurrencesBuilder_.clear(); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public Builder removeOccurrences(int index) { if (occurrencesBuilder_ == null) { ensureOccurrencesIsMutable(); occurrences_.remove(index); onChanged(); } else { occurrencesBuilder_.remove(index); } return this; } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public io.grafeas.v1beta1.Occurrence.Builder getOccurrencesBuilder(int index) { return getOccurrencesFieldBuilder().getBuilder(index); } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrencesOrBuilder(int index) { if (occurrencesBuilder_ == null) { return occurrences_.get(index); } else { return occurrencesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public java.util.List<? 
extends io.grafeas.v1beta1.OccurrenceOrBuilder> getOccurrencesOrBuilderList() { if (occurrencesBuilder_ != null) { return occurrencesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(occurrences_); } } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder() { return getOccurrencesFieldBuilder() .addBuilder(io.grafeas.v1beta1.Occurrence.getDefaultInstance()); } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public io.grafeas.v1beta1.Occurrence.Builder addOccurrencesBuilder(int index) { return getOccurrencesFieldBuilder() .addBuilder(index, io.grafeas.v1beta1.Occurrence.getDefaultInstance()); } /** * * * <pre> * The occurrences attached to the specified note. * </pre> * * <code>repeated .grafeas.v1beta1.Occurrence occurrences = 1;</code> */ public java.util.List<io.grafeas.v1beta1.Occurrence.Builder> getOccurrencesBuilderList() { return getOccurrencesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.Occurrence, io.grafeas.v1beta1.Occurrence.Builder, io.grafeas.v1beta1.OccurrenceOrBuilder> getOccurrencesFieldBuilder() { if (occurrencesBuilder_ == null) { occurrencesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< io.grafeas.v1beta1.Occurrence, io.grafeas.v1beta1.Occurrence.Builder, io.grafeas.v1beta1.OccurrenceOrBuilder>( occurrences_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); occurrences_ = null; } return occurrencesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to provide to skip to a particular spot in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1beta1.ListNoteOccurrencesResponse) } // @@protoc_insertion_point(class_scope:grafeas.v1beta1.ListNoteOccurrencesResponse) private static final io.grafeas.v1beta1.ListNoteOccurrencesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1beta1.ListNoteOccurrencesResponse(); } public static io.grafeas.v1beta1.ListNoteOccurrencesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListNoteOccurrencesResponse> PARSER = new com.google.protobuf.AbstractParser<ListNoteOccurrencesResponse>() { @java.lang.Override public ListNoteOccurrencesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListNoteOccurrencesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListNoteOccurrencesResponse> getParserForType() { return PARSER; } @java.lang.Override public io.grafeas.v1beta1.ListNoteOccurrencesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/stanbol
35,044
rules/manager/src/main/java/org/apache/stanbol/rules/manager/parse/RuleParserImplTokenManager.java
/* Generated By:JavaCC: Do not edit this line. RuleParserImplTokenManager.java */ package org.apache.stanbol.rules.manager.parse; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.net.URI; import java.net.URISyntaxException; import org.apache.clerezza.commons.rdf.IRI; import org.apache.stanbol.rules.base.api.Rule; import org.apache.stanbol.rules.base.api.RuleAtom; import org.apache.stanbol.rules.base.api.util.AtomList; import org.apache.stanbol.rules.manager.KB; import org.apache.stanbol.rules.manager.RuleImpl; import org.apache.stanbol.rules.manager.atoms.ClassAtom; import org.apache.stanbol.rules.manager.atoms.ComparisonAtom; import org.apache.stanbol.rules.manager.atoms.ConcatAtom; import org.apache.stanbol.rules.manager.atoms.CreateLabelAtom; import org.apache.stanbol.rules.manager.atoms.DatavaluedPropertyAtom; import org.apache.stanbol.rules.manager.atoms.DifferentAtom; import org.apache.stanbol.rules.manager.atoms.DivisionAtom; import org.apache.stanbol.rules.manager.atoms.EndsWithAtom; import org.apache.stanbol.rules.manager.atoms.ExpressionAtom; import org.apache.stanbol.rules.manager.atoms.GreaterEqualThanAtom; import org.apache.stanbol.rules.manager.atoms.GreaterThanAtom; import org.apache.stanbol.rules.manager.atoms.IObjectAtom; import org.apache.stanbol.rules.manager.atoms.IndividualPropertyAtom; import org.apache.stanbol.rules.manager.atoms.IsBlankAtom; import org.apache.stanbol.rules.manager.atoms.LengthAtom; import org.apache.stanbol.rules.manager.atoms.LessEqualThanAtom; import org.apache.stanbol.rules.manager.atoms.LessThanAtom; import org.apache.stanbol.rules.manager.atoms.LetAtom; import org.apache.stanbol.rules.manager.atoms.LocalNameAtom; import org.apache.stanbol.rules.manager.atoms.LowerCaseAtom; import org.apache.stanbol.rules.manager.atoms.MultiplicationAtom; import org.apache.stanbol.rules.manager.atoms.NamespaceAtom; import 
org.apache.stanbol.rules.manager.atoms.NewIRIAtom; import org.apache.stanbol.rules.manager.atoms.NewLiteralAtom; import org.apache.stanbol.rules.manager.atoms.NotAtom; import org.apache.stanbol.rules.manager.atoms.NumberAtom; import org.apache.stanbol.rules.manager.atoms.NumericFunctionAtom; import org.apache.stanbol.rules.manager.atoms.NumericVariableAtom; import org.apache.stanbol.rules.manager.atoms.PropStringAtom; import org.apache.stanbol.rules.manager.atoms.ResourceAtom; import org.apache.stanbol.rules.manager.atoms.RuleBlankNode; import org.apache.stanbol.rules.manager.atoms.SameAtom; import org.apache.stanbol.rules.manager.atoms.StartsWithAtom; import org.apache.stanbol.rules.manager.atoms.StrAtom; import org.apache.stanbol.rules.manager.atoms.StringAtom; import org.apache.stanbol.rules.manager.atoms.StringFunctionAtom; import org.apache.stanbol.rules.manager.atoms.StringVariableAtom; import org.apache.stanbol.rules.manager.atoms.SubstringAtom; import org.apache.stanbol.rules.manager.atoms.SubtractionAtom; import org.apache.stanbol.rules.manager.atoms.SumAtom; import org.apache.stanbol.rules.manager.atoms.TypedLiteralAtom; import org.apache.stanbol.rules.manager.atoms.UnionAtom; import org.apache.stanbol.rules.manager.atoms.UpperCaseAtom; import org.apache.stanbol.rules.manager.atoms.VariableAtom; import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.Resource; /** Token Manager. */ public class RuleParserImplTokenManager implements RuleParserImplConstants { /** Debug output. */ public java.io.PrintStream debugStream = System.out; /** Set debug output. 
*/ public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; } private final int jjStopStringLiteralDfa_0(int pos, long active0) { switch (pos) { case 0: if ((active0 & 0x3dffffffe100L) != 0L) { jjmatchedKind = 52; return 1; } if ((active0 & 0x1000000000000L) != 0L) return 8; if ((active0 & 0x800L) != 0L) return 1; return -1; case 1: if ((active0 & 0x1dfffff66000L) != 0L) { if (jjmatchedPos != 1) { jjmatchedKind = 52; jjmatchedPos = 1; } return 1; } if ((active0 & 0x200000098000L) != 0L) return 1; return -1; case 2: if ((active0 & 0x3cc8af706000L) != 0L) { if (jjmatchedPos != 2) { jjmatchedKind = 52; jjmatchedPos = 2; } return 1; } if ((active0 & 0x13750860000L) != 0L) return 1; return -1; case 3: if ((active0 & 0x2cc1aff04000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 3; return 1; } if ((active0 & 0x100800002000L) != 0L) return 1; return -1; case 4: if ((active0 & 0x40100000000L) != 0L) return 1; if ((active0 & 0x28c0aff04000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 4; return 1; } return -1; case 5: if ((active0 & 0xa0500000L) != 0L) return 1; if ((active0 & 0x28c00fa04000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 5; return 1; } return -1; case 6: if ((active0 & 0x200000000000L) != 0L) return 1; if ((active0 & 0x8c00fa04000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 6; return 1; } return -1; case 7: if ((active0 & 0x8000000L) != 0L) return 1; if ((active0 & 0x8c007a04000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 7; return 1; } return -1; case 8: if ((active0 & 0xc003804000L) != 0L) return 1; if ((active0 & 0x80004200000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 8; return 1; } return -1; case 9: if ((active0 & 0x4200000L) != 0L) return 1; if ((active0 & 0x80000000000L) != 0L) { jjmatchedKind = 52; jjmatchedPos = 9; return 1; } return -1; default : return -1; } } private final int jjStartNfa_0(int pos, long active0) { return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1); } private int jjStopAtPos(int pos, int kind) 
{ jjmatchedKind = kind; jjmatchedPos = pos; return pos + 1; } private int jjMoveStringLiteralDfa0_0() { switch(curChar) { case 34: return jjStartNfaWithStates_0(0, 48, 8); case 40: return jjStopAtPos(0, 46); case 41: return jjStopAtPos(0, 47); case 44: return jjStopAtPos(0, 12); case 45: return jjMoveStringLiteralDfa1_0(0x100L); case 46: return jjStartNfaWithStates_0(0, 11, 1); case 47: return jjMoveStringLiteralDfa1_0(0x20L); case 58: return jjStopAtPos(0, 9); case 61: return jjStopAtPos(0, 10); case 91: return jjStopAtPos(0, 49); case 93: return jjStopAtPos(0, 50); case 94: return jjStopAtPos(0, 41); case 99: return jjMoveStringLiteralDfa1_0(0x80020000000L); case 100: return jjMoveStringLiteralDfa1_0(0x1000004000L); case 101: return jjMoveStringLiteralDfa1_0(0x8000000L); case 103: return jjMoveStringLiteralDfa1_0(0x50000L); case 104: return jjMoveStringLiteralDfa1_0(0x40000000L); case 105: return jjMoveStringLiteralDfa1_0(0x200000080000L); case 108: return jjMoveStringLiteralDfa1_0(0x8012428000L); case 109: return jjMoveStringLiteralDfa1_0(0x800000000L); case 110: return jjMoveStringLiteralDfa1_0(0x6100300000L); case 112: return jjMoveStringLiteralDfa1_0(0x100000000000L); case 115: return jjMoveStringLiteralDfa1_0(0x10604802000L); case 117: return jjMoveStringLiteralDfa1_0(0x40001000000L); case 118: return jjMoveStringLiteralDfa1_0(0x80000000L); default : return jjMoveNfa_0(2, 0); } } private int jjMoveStringLiteralDfa1_0(long active0) { try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(0, active0); return 1; } switch(curChar) { case 42: if ((active0 & 0x20L) != 0L) return jjStopAtPos(1, 5); break; case 62: if ((active0 & 0x100L) != 0L) return jjStopAtPos(1, 8); break; case 97: return jjMoveStringLiteralDfa2_0(active0, 0x40c0002000L); case 101: return jjMoveStringLiteralDfa2_0(active0, 0x10760000L); case 105: return jjMoveStringLiteralDfa2_0(active0, 0x1000004000L); case 110: return 
jjMoveStringLiteralDfa2_0(active0, 0x40008000000L); case 111: return jjMoveStringLiteralDfa2_0(active0, 0xa122000000L); case 112: return jjMoveStringLiteralDfa2_0(active0, 0x1000000L); case 114: return jjMoveStringLiteralDfa2_0(active0, 0x180000000000L); case 115: if ((active0 & 0x80000L) != 0L) { jjmatchedKind = 19; jjmatchedPos = 1; } return jjMoveStringLiteralDfa2_0(active0, 0x200000000000L); case 116: if ((active0 & 0x8000L) != 0L) return jjStartNfaWithStates_0(1, 15, 1); else if ((active0 & 0x10000L) != 0L) return jjStartNfaWithStates_0(1, 16, 1); return jjMoveStringLiteralDfa2_0(active0, 0x10004000000L); case 117: return jjMoveStringLiteralDfa2_0(active0, 0xe00800000L); default : break; } return jjStartNfa_0(0, active0); } private int jjMoveStringLiteralDfa2_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(0, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(1, active0); return 2; } switch(curChar) { case 66: return jjMoveStringLiteralDfa3_0(active0, 0x200000000000L); case 97: return jjMoveStringLiteralDfa3_0(active0, 0x4000000L); case 98: if ((active0 & 0x400000000L) != 0L) { jjmatchedKind = 34; jjmatchedPos = 2; } return jjMoveStringLiteralDfa3_0(active0, 0x800000L); case 99: return jjMoveStringLiteralDfa3_0(active0, 0x8000000000L); case 100: return jjMoveStringLiteralDfa3_0(active0, 0x8000000L); case 101: return jjMoveStringLiteralDfa3_0(active0, 0x80000000000L); case 102: return jjMoveStringLiteralDfa3_0(active0, 0x4000L); case 105: return jjMoveStringLiteralDfa3_0(active0, 0x40000000000L); case 108: return jjMoveStringLiteralDfa3_0(active0, 0x880000000L); case 109: if ((active0 & 0x200000000L) != 0L) return jjStartNfaWithStates_0(2, 33, 1); return jjMoveStringLiteralDfa3_0(active0, 0x4000002000L); case 110: return jjMoveStringLiteralDfa3_0(active0, 0x20400000L); case 111: return jjMoveStringLiteralDfa3_0(active0, 0x100000000000L); case 112: return 
jjMoveStringLiteralDfa3_0(active0, 0x1000000L); case 113: if ((active0 & 0x20000L) != 0L) return jjStartNfaWithStates_0(2, 17, 1); else if ((active0 & 0x40000L) != 0L) return jjStartNfaWithStates_0(2, 18, 1); break; case 114: if ((active0 & 0x10000000000L) != 0L) return jjStartNfaWithStates_0(2, 40, 1); break; case 115: if ((active0 & 0x40000000L) != 0L) return jjStartNfaWithStates_0(2, 30, 1); break; case 116: if ((active0 & 0x10000000L) != 0L) return jjStartNfaWithStates_0(2, 28, 1); else if ((active0 & 0x2000000000L) != 0L) { jjmatchedKind = 37; jjmatchedPos = 2; } return jjMoveStringLiteralDfa3_0(active0, 0x100000000L); case 118: if ((active0 & 0x1000000000L) != 0L) return jjStartNfaWithStates_0(2, 36, 1); break; case 119: return jjMoveStringLiteralDfa3_0(active0, 0x2300000L); default : break; } return jjStartNfa_0(1, active0); } private int jjMoveStringLiteralDfa3_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(1, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(2, active0); return 3; } switch(curChar) { case 73: return jjMoveStringLiteralDfa4_0(active0, 0x100000L); case 76: return jjMoveStringLiteralDfa4_0(active0, 0x200000L); case 97: return jjMoveStringLiteralDfa4_0(active0, 0x88000000000L); case 99: return jjMoveStringLiteralDfa4_0(active0, 0x20000000L); case 101: if ((active0 & 0x2000L) != 0L) return jjStartNfaWithStates_0(3, 13, 1); return jjMoveStringLiteralDfa4_0(active0, 0x4103000000L); case 102: return jjMoveStringLiteralDfa4_0(active0, 0x4000L); case 103: return jjMoveStringLiteralDfa4_0(active0, 0x400000L); case 108: return jjMoveStringLiteralDfa4_0(active0, 0x200000000000L); case 111: return jjMoveStringLiteralDfa4_0(active0, 0x40000000000L); case 112: if ((active0 & 0x100000000000L) != 0L) return jjStartNfaWithStates_0(3, 44, 1); break; case 114: return jjMoveStringLiteralDfa4_0(active0, 0x4000000L); case 115: return jjMoveStringLiteralDfa4_0(active0, 
0x8800000L); case 116: if ((active0 & 0x800000000L) != 0L) return jjStartNfaWithStates_0(3, 35, 1); break; case 117: return jjMoveStringLiteralDfa4_0(active0, 0x80000000L); default : break; } return jjStartNfa_0(2, active0); } private int jjMoveStringLiteralDfa4_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(2, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(3, active0); return 4; } switch(curChar) { case 82: return jjMoveStringLiteralDfa5_0(active0, 0x100000L); case 87: return jjMoveStringLiteralDfa5_0(active0, 0x8000000L); case 97: return jjMoveStringLiteralDfa5_0(active0, 0x200020000000L); case 101: return jjMoveStringLiteralDfa5_0(active0, 0x80004000L); case 105: return jjMoveStringLiteralDfa5_0(active0, 0x200000L); case 108: return jjMoveStringLiteralDfa5_0(active0, 0x8000000000L); case 110: if ((active0 & 0x40000000000L) != 0L) return jjStartNfaWithStates_0(4, 42, 1); break; case 114: return jjMoveStringLiteralDfa5_0(active0, 0x3000000L); case 115: return jjMoveStringLiteralDfa5_0(active0, 0x4000000000L); case 116: return jjMoveStringLiteralDfa5_0(active0, 0x80004c00000L); case 120: if ((active0 & 0x100000000L) != 0L) return jjStartNfaWithStates_0(4, 32, 1); break; default : break; } return jjStartNfa_0(3, active0); } private int jjMoveStringLiteralDfa5_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(3, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(4, active0); return 5; } switch(curChar) { case 67: return jjMoveStringLiteralDfa6_0(active0, 0x3000000L); case 73: if ((active0 & 0x100000L) != 0L) return jjStartNfaWithStates_0(5, 20, 1); break; case 101: return jjMoveStringLiteralDfa6_0(active0, 0x80000000000L); case 104: if ((active0 & 0x400000L) != 0L) return jjStartNfaWithStates_0(5, 22, 1); break; case 105: return jjMoveStringLiteralDfa6_0(active0, 0x8000000L); case 110: return 
jjMoveStringLiteralDfa6_0(active0, 0x208000000000L); case 112: return jjMoveStringLiteralDfa6_0(active0, 0x4000000000L); case 114: return jjMoveStringLiteralDfa6_0(active0, 0x804000L); case 115: if ((active0 & 0x80000000L) != 0L) return jjStartNfaWithStates_0(5, 31, 1); return jjMoveStringLiteralDfa6_0(active0, 0x4000000L); case 116: if ((active0 & 0x20000000L) != 0L) return jjStartNfaWithStates_0(5, 29, 1); return jjMoveStringLiteralDfa6_0(active0, 0x200000L); default : break; } return jjStartNfa_0(4, active0); } private int jjMoveStringLiteralDfa6_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(4, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(5, active0); return 6; } switch(curChar) { case 76: return jjMoveStringLiteralDfa7_0(active0, 0x80000000000L); case 87: return jjMoveStringLiteralDfa7_0(active0, 0x4000000L); case 97: return jjMoveStringLiteralDfa7_0(active0, 0xc003000000L); case 101: return jjMoveStringLiteralDfa7_0(active0, 0x204000L); case 105: return jjMoveStringLiteralDfa7_0(active0, 0x800000L); case 107: if ((active0 & 0x200000000000L) != 0L) return jjStartNfaWithStates_0(6, 45, 1); break; case 116: return jjMoveStringLiteralDfa7_0(active0, 0x8000000L); default : break; } return jjStartNfa_0(5, active0); } private int jjMoveStringLiteralDfa7_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(5, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(6, active0); return 7; } switch(curChar) { case 97: return jjMoveStringLiteralDfa8_0(active0, 0x80000000000L); case 99: return jjMoveStringLiteralDfa8_0(active0, 0x4000000000L); case 104: if ((active0 & 0x8000000L) != 0L) return jjStartNfaWithStates_0(7, 27, 1); break; case 105: return jjMoveStringLiteralDfa8_0(active0, 0x4000000L); case 109: return jjMoveStringLiteralDfa8_0(active0, 0x8000000000L); case 110: return 
jjMoveStringLiteralDfa8_0(active0, 0x804000L); case 114: return jjMoveStringLiteralDfa8_0(active0, 0x200000L); case 115: return jjMoveStringLiteralDfa8_0(active0, 0x3000000L); default : break; } return jjStartNfa_0(6, active0); } private int jjMoveStringLiteralDfa8_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(6, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(7, active0); return 8; } switch(curChar) { case 97: return jjMoveStringLiteralDfa9_0(active0, 0x200000L); case 98: return jjMoveStringLiteralDfa9_0(active0, 0x80000000000L); case 101: if ((active0 & 0x1000000L) != 0L) return jjStartNfaWithStates_0(8, 24, 1); else if ((active0 & 0x2000000L) != 0L) return jjStartNfaWithStates_0(8, 25, 1); else if ((active0 & 0x4000000000L) != 0L) return jjStartNfaWithStates_0(8, 38, 1); else if ((active0 & 0x8000000000L) != 0L) return jjStartNfaWithStates_0(8, 39, 1); break; case 103: if ((active0 & 0x800000L) != 0L) return jjStartNfaWithStates_0(8, 23, 1); break; case 116: if ((active0 & 0x4000L) != 0L) return jjStartNfaWithStates_0(8, 14, 1); return jjMoveStringLiteralDfa9_0(active0, 0x4000000L); default : break; } return jjStartNfa_0(7, active0); } private int jjMoveStringLiteralDfa9_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(7, old0); try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { jjStopStringLiteralDfa_0(8, active0); return 9; } switch(curChar) { case 101: return jjMoveStringLiteralDfa10_0(active0, 0x80000000000L); case 104: if ((active0 & 0x4000000L) != 0L) return jjStartNfaWithStates_0(9, 26, 1); break; case 108: if ((active0 & 0x200000L) != 0L) return jjStartNfaWithStates_0(9, 21, 1); break; default : break; } return jjStartNfa_0(8, active0); } private int jjMoveStringLiteralDfa10_0(long old0, long active0) { if (((active0 &= old0)) == 0L) return jjStartNfa_0(8, old0); try { curChar = input_stream.readChar(); } 
catch(java.io.IOException e) { jjStopStringLiteralDfa_0(9, active0); return 10; } switch(curChar) { case 108: if ((active0 & 0x80000000000L) != 0L) return jjStartNfaWithStates_0(10, 43, 1); break; default : break; } return jjStartNfa_0(9, active0); } private int jjStartNfaWithStates_0(int pos, int kind, int state) { jjmatchedKind = kind; jjmatchedPos = pos; try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { return pos + 1; } return jjMoveNfa_0(state, pos + 1); } private int jjMoveNfa_0(int startState, int curPos) { int startsAt = 0; jjnewStateCnt = 13; int i = 1; jjstateSet[0] = startState; int kind = 0x7fffffff; for (;;) { if (++jjround == 0x7fffffff) ReInitRounds(); if (curChar < 64) { long l = 1L << curChar; do { switch(jjstateSet[--i]) { case 2: if ((0x3ff600000000000L & l) != 0L) { if (kind > 52) kind = 52; jjCheckNAdd(1); } else if (curChar == 34) jjCheckNAdd(8); else if (curChar == 60) jjCheckNAdd(5); else if (curChar == 63) jjCheckNAdd(3); if ((0x3ff000000000000L & l) != 0L) { if (kind > 51) kind = 51; jjCheckNAdd(0); } break; case 0: if ((0x3ff000000000000L & l) == 0L) break; if (kind > 51) kind = 51; jjCheckNAdd(0); break; case 1: if ((0x3ff600000000000L & l) == 0L) break; if (kind > 52) kind = 52; jjCheckNAdd(1); break; case 3: if ((0x3ff200000000000L & l) == 0L) break; if (kind > 53) kind = 53; jjCheckNAdd(3); break; case 4: if (curChar == 60) jjCheckNAdd(5); break; case 5: if ((0x7ffe33800000000L & l) != 0L) jjCheckNAddTwoStates(5, 6); break; case 6: if (curChar == 62) kind = 54; break; case 7: if (curChar == 34) jjCheckNAdd(8); break; case 8: if ((0x87ffe03b00000000L & l) != 0L) jjCheckNAddTwoStates(8, 9); break; case 9: if (curChar == 34 && kind > 55) kind = 55; break; case 10: if (curChar == 58) jjCheckNAdd(11); break; case 11: if ((0x3ff600000000000L & l) == 0L) break; if (kind > 56) kind = 56; jjCheckNAdd(11); break; default : break; } } while(i != startsAt); } else if (curChar < 128) { long l = 1L << (curChar & 077); do { 
switch(jjstateSet[--i]) { case 2: if ((0x7fffffe87fffffeL & l) != 0L) { if (kind > 52) kind = 52; jjCheckNAdd(1); } if (curChar == 95) jjstateSet[jjnewStateCnt++] = 10; break; case 1: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 52) kind = 52; jjCheckNAdd(1); break; case 3: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 53) kind = 53; jjstateSet[jjnewStateCnt++] = 3; break; case 5: if ((0x7fffffe87fffffeL & l) != 0L) jjAddStates(0, 1); break; case 8: if ((0x7fffffe97fffffeL & l) != 0L) jjAddStates(2, 3); break; case 11: if ((0x7fffffe87fffffeL & l) == 0L) break; if (kind > 56) kind = 56; jjstateSet[jjnewStateCnt++] = 11; break; case 12: if (curChar == 95) jjstateSet[jjnewStateCnt++] = 10; break; default : break; } } while(i != startsAt); } else { int i2 = (curChar & 0xff) >> 6; long l2 = 1L << (curChar & 077); do { switch(jjstateSet[--i]) { default : break; } } while(i != startsAt); } if (kind != 0x7fffffff) { jjmatchedKind = kind; jjmatchedPos = curPos; kind = 0x7fffffff; } ++curPos; if ((i = jjnewStateCnt) == (startsAt = 13 - (jjnewStateCnt = startsAt))) return curPos; try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { return curPos; } } } private int jjMoveStringLiteralDfa0_1() { switch(curChar) { case 42: return jjMoveStringLiteralDfa1_1(0x40L); default : return 1; } } private int jjMoveStringLiteralDfa1_1(long active0) { try { curChar = input_stream.readChar(); } catch(java.io.IOException e) { return 1; } switch(curChar) { case 47: if ((active0 & 0x40L) != 0L) return jjStopAtPos(1, 6); break; default : return 2; } return 2; } static final int[] jjnextStates = { 5, 6, 8, 9, }; /** Token literal values. 
*/ public static final String[] jjstrLiteralImages = { "", null, null, null, null, null, null, null, "\55\76", "\72", "\75", "\56", "\54", "\163\141\155\145", "\144\151\146\146\145\162\145\156\164", "\154\164", "\147\164", "\154\145\161", "\147\145\161", "\151\163", "\156\145\167\111\122\111", "\156\145\167\114\151\164\145\162\141\154", "\154\145\156\147\164\150", "\163\165\142\163\164\162\151\156\147", "\165\160\160\145\162\103\141\163\145", "\154\157\167\145\162\103\141\163\145", "\163\164\141\162\164\163\127\151\164\150", "\145\156\144\163\127\151\164\150", "\154\145\164", "\143\157\156\143\141\164", "\150\141\163", "\166\141\154\165\145\163", "\156\157\164\145\170", "\163\165\155", "\163\165\142", "\155\165\154\164", "\144\151\166", "\156\157\164", "\156\141\155\145\163\160\141\143\145", "\154\157\143\141\154\156\141\155\145", "\163\164\162", "\136", "\165\156\151\157\156", "\143\162\145\141\164\145\114\141\142\145\154", "\160\162\157\160", "\151\163\102\154\141\156\153", "\50", "\51", "\42", "\133", "\135", null, null, null, null, null, null, }; /** Lexer state names. */ public static final String[] lexStateNames = { "DEFAULT", "WithinComment", }; /** Lex State array. */ public static final int[] jjnewLexState = { -1, -1, -1, -1, -1, 1, 0, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, }; static final long[] jjtoToken = { 0x1ffffffffffff01L, }; static final long[] jjtoSkip = { 0x7eL, }; static final long[] jjtoMore = { 0x80L, }; protected SimpleCharStream input_stream; private final int[] jjrounds = new int[13]; private final int[] jjstateSet = new int[26]; protected char curChar; /** Constructor. 
*/ public RuleParserImplTokenManager(SimpleCharStream stream){ if (SimpleCharStream.staticFlag) throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer."); input_stream = stream; } /** Constructor. */ public RuleParserImplTokenManager(SimpleCharStream stream, int lexState){ this(stream); SwitchTo(lexState); } /** Reinitialise parser. */ public void ReInit(SimpleCharStream stream) { jjmatchedPos = jjnewStateCnt = 0; curLexState = defaultLexState; input_stream = stream; ReInitRounds(); } private void ReInitRounds() { int i; jjround = 0x80000001; for (i = 13; i-- > 0;) jjrounds[i] = 0x80000000; } /** Reinitialise parser. */ public void ReInit(SimpleCharStream stream, int lexState) { ReInit(stream); SwitchTo(lexState); } /** Switch to specified lex state. */ public void SwitchTo(int lexState) { if (lexState >= 2 || lexState < 0) throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE); else curLexState = lexState; } protected Token jjFillToken() { final Token t; final String curTokenImage; final int beginLine; final int endLine; final int beginColumn; final int endColumn; String im = jjstrLiteralImages[jjmatchedKind]; curTokenImage = (im == null) ? input_stream.GetImage() : im; beginLine = input_stream.getBeginLine(); beginColumn = input_stream.getBeginColumn(); endLine = input_stream.getEndLine(); endColumn = input_stream.getEndColumn(); t = Token.newToken(jjmatchedKind, curTokenImage); t.beginLine = beginLine; t.endLine = endLine; t.beginColumn = beginColumn; t.endColumn = endColumn; return t; } int curLexState = 0; int defaultLexState = 0; int jjnewStateCnt; int jjround; int jjmatchedPos; int jjmatchedKind; /** Get the next Token. 
*/ public Token getNextToken() { Token matchedToken; int curPos = 0; EOFLoop : for (;;) { try { curChar = input_stream.BeginToken(); } catch(java.io.IOException e) { jjmatchedKind = 0; matchedToken = jjFillToken(); return matchedToken; } for (;;) { switch(curLexState) { case 0: try { input_stream.backup(0); while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L) curChar = input_stream.BeginToken(); } catch (java.io.IOException e1) { continue EOFLoop; } jjmatchedKind = 0x7fffffff; jjmatchedPos = 0; curPos = jjMoveStringLiteralDfa0_0(); break; case 1: jjmatchedKind = 0x7fffffff; jjmatchedPos = 0; curPos = jjMoveStringLiteralDfa0_1(); if (jjmatchedPos == 0 && jjmatchedKind > 7) { jjmatchedKind = 7; } break; } if (jjmatchedKind != 0x7fffffff) { if (jjmatchedPos + 1 < curPos) input_stream.backup(curPos - jjmatchedPos - 1); if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) { matchedToken = jjFillToken(); if (jjnewLexState[jjmatchedKind] != -1) curLexState = jjnewLexState[jjmatchedKind]; return matchedToken; } else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) { if (jjnewLexState[jjmatchedKind] != -1) curLexState = jjnewLexState[jjmatchedKind]; continue EOFLoop; } if (jjnewLexState[jjmatchedKind] != -1) curLexState = jjnewLexState[jjmatchedKind]; curPos = 0; jjmatchedKind = 0x7fffffff; try { curChar = input_stream.readChar(); continue; } catch (java.io.IOException e1) { } } int error_line = input_stream.getEndLine(); int error_column = input_stream.getEndColumn(); String error_after = null; boolean EOFSeen = false; try { input_stream.readChar(); input_stream.backup(1); } catch (java.io.IOException e1) { EOFSeen = true; error_after = curPos <= 1 ? "" : input_stream.GetImage(); if (curChar == '\n' || curChar == '\r') { error_line++; error_column = 0; } else error_column++; } if (!EOFSeen) { input_stream.backup(1); error_after = curPos <= 1 ? 
"" : input_stream.GetImage(); } throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR); } } } private void jjCheckNAdd(int state) { if (jjrounds[state] != jjround) { jjstateSet[jjnewStateCnt++] = state; jjrounds[state] = jjround; } } private void jjAddStates(int start, int end) { do { jjstateSet[jjnewStateCnt++] = jjnextStates[start]; } while (start++ != end); } private void jjCheckNAddTwoStates(int state1, int state2) { jjCheckNAdd(state1); jjCheckNAdd(state2); } }
apache/tinkerpop
35,405
gremlin-core/src/test/java/org/apache/tinkerpop/gremlin/process/traversal/util/TraversalHelperTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.process.traversal.util;

import org.apache.tinkerpop.gremlin.TestDataBuilder;
import org.apache.tinkerpop.gremlin.process.computer.traversal.step.map.TraversalVertexProgramStep;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.Pop;
import org.apache.tinkerpop.gremlin.process.traversal.Step;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.PopContaining;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.step.branch.LocalStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.branch.RepeatStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.branch.UnionStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.FilterStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.HasStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.LambdaFilterStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.PathFilterStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.TraversalFilterStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.WhereTraversalStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.filter.NotStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.FlatMapStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.FoldStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStepContract;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.PropertiesStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.TraversalFlatMapStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.TraversalMapStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.VertexStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.VertexStepContract;
import org.apache.tinkerpop.gremlin.process.traversal.step.sideEffect.IdentityStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyStep;
import org.apache.tinkerpop.gremlin.structure.PropertyType;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;
import org.junit.Test;
import org.mockito.Mockito;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.has;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.in;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.out;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.outE;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.path;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.repeat;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.select;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.union;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.valueMap;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.hamcrest.MatcherAssert.assertThat;

/**
 * Unit tests for {@link TraversalHelper}: step lookup/insertion/replacement, step-chain
 * (previous/next) maintenance, local-vs-global child detection, star-graph detection,
 * label collection, recursive step search, pop-instruction extraction and the
 * contract-registry behavior of {@code getStepsOfClass}.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public class TraversalHelperTest {

    // Moving a step into a nested (child) traversal must reset its previousStep to EmptyStep.
    @Test
    public void shouldSetPreviousStepToEmptyStep() {
        final Traversal.Admin<?, ?> traversal = __.V().out().asAdmin();
        //transform the traversal to __.V().not(out())
        //the VertexStep's previousStep should be the EmptyStep
        Optional<VertexStep> vertexStepOpt = TraversalHelper.getFirstStepOfAssignableClass(VertexStep.class, traversal);
        assertThat(vertexStepOpt.isPresent(), is(true));

        Traversal.Admin<?,?> inner = __.start().asAdmin();
        inner.addStep(0, vertexStepOpt.get());
        TraversalHelper.replaceStep(vertexStepOpt.get(), new NotStep<>(__.identity().asAdmin(), inner), traversal);

        List<VertexStep> vertexSteps = TraversalHelper.getStepsOfAssignableClassRecursively(VertexStep.class, traversal);
        assertEquals(1, vertexSteps.size());

        VertexStep vertexStep = vertexSteps.get(0);
        assertThat("Expected the previousStep to be an EmptyStep, found instead " + vertexStep.getPreviousStep().toString(),
                vertexStep.getPreviousStep() == EmptyStep.instance(), is(true));
    }

    // Attaching the child to each of these parent step types marks it as a local (non-global) child.
    // NOTE: the parent-step constructors register the child as a side effect; the created
    // step object itself is intentionally discarded.
    @Test
    public void shouldIdentifyLocalChildren() {
        final Traversal.Admin<?, ?> localChild = __.as("x").select("a", "b").by("name").asAdmin();
        new LocalStep<>(new DefaultTraversal(), localChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
        ///
        new WhereTraversalStep<>(new DefaultTraversal(), localChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
        ///
        new TraversalFilterStep<>(new DefaultTraversal(), localChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
        ///
        new TraversalMapStep<>(new DefaultTraversal(), localChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
        ///
        new TraversalFlatMapStep<>(new DefaultTraversal(), localChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
        ///
        final Traversal.Admin<?, ?> remoteLocalChild = __.repeat(localChild).asAdmin();
        new LocalStep<>(new DefaultTraversal<>(), remoteLocalChild);
        assertFalse(TraversalHelper.isGlobalChild(localChild));
    }

    // The inverse of the test above: repeat/union/vertex-program parents make a child global.
    @Test
    public void shouldIdentifyGlobalChildren() {
        final Traversal.Admin<?, ?> globalChild = __.select("a", "b").by("name").asAdmin();
        TraversalParent parent = new RepeatStep<>(new DefaultTraversal());
        ((RepeatStep) parent).setRepeatTraversal(globalChild);
        assertThat(TraversalHelper.isGlobalChild(globalChild), is(true));
        ///
        new UnionStep<>(new DefaultTraversal(), globalChild);
        assertThat(TraversalHelper.isGlobalChild(globalChild), is(true));
        ///
        new TraversalVertexProgramStep(new DefaultTraversal<>(), globalChild);
        assertThat(TraversalHelper.isGlobalChild(globalChild), is(true));
        ///
        final Traversal.Admin<?, ?> remoteRemoteChild = __.repeat(globalChild).asAdmin();
        new UnionStep<>(new DefaultTraversal(), remoteRemoteChild);
        assertThat(TraversalHelper.isGlobalChild(globalChild), is(true));
    }

    // identity/id/label/values stay on the incoming element; outE does not.
    @Test
    public void shouldIdentifyLocalProperties() {
        assertThat(TraversalHelper.isLocalProperties(__.identity().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalProperties(__.id().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalProperties(__.label().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalProperties(__.values("name").asAdmin()), is(true));
        assertFalse(TraversalHelper.isLocalProperties(outE("knows").asAdmin()));
    }

    // hasStepOfClass uses exact-class matching: HasStep instances do not satisfy FilterStep.class.
    @Test
    public void shouldNotFindStepOfClassInTraversal() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertThat(TraversalHelper.hasStepOfClass(FilterStep.class, traversal), is(false));
    }

    @Test
    public void shouldFindStepOfClassInTraversal() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new IdentityStep<>(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertThat(TraversalHelper.hasStepOfClass(IdentityStep.class, traversal), is(true));
    }

    @Test
    public void shouldNotFindStepOfAssignableClassInTraversal() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertThat(TraversalHelper.hasStepOfAssignableClass(IdentityStep.class, traversal), is(false));
    }

    // Assignable matching: HasStep IS-A FilterStep, so this succeeds where the exact-class test failed.
    @Test
    public void shouldFindStepOfAssignableClassInTraversal() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertThat(TraversalHelper.hasStepOfAssignableClass(FilterStep.class, traversal), is(true));
    }

    @Test
    public void shouldGetTheStepIndex() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        final HasStep hasStep = new HasStep(traversal);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, hasStep);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertEquals(1, TraversalHelper.stepIndex(hasStep, traversal));
    }

    // stepIndex returns -1 for a step that is not in the traversal.
    @Test
    public void shouldNotFindTheStepIndex() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        final IdentityStep identityStep = new IdentityStep(traversal);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        assertEquals(-1, TraversalHelper.stepIndex(identityStep, traversal));
    }

    @Test
    public void shouldInsertBeforeStep() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        final HasStep hasStep = new HasStep(traversal);
        final IdentityStep identityStep = new IdentityStep(traversal);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, hasStep);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        TraversalHelper.insertBeforeStep(identityStep, hasStep, traversal);
        assertEquals(traversal.asAdmin().getSteps().get(1), identityStep);
        assertEquals(4, traversal.asAdmin().getSteps().size());
    }

    @Test
    public void shouldInsertAfterStep() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        final HasStep hasStep = new HasStep(traversal);
        final IdentityStep identityStep = new IdentityStep(traversal);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, hasStep);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        TraversalHelper.insertAfterStep(identityStep, hasStep, traversal);
        assertEquals(traversal.asAdmin().getSteps().get(2), identityStep);
        assertEquals(4, traversal.asAdmin().getSteps().size());
    }

    // replaceStep swaps in place: size stays 3 and the replacement occupies the replaced index.
    @Test
    public void shouldReplaceStep() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        final HasStep hasStep = new HasStep(traversal);
        final IdentityStep identityStep = new IdentityStep(traversal);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, hasStep);
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        TraversalHelper.replaceStep(hasStep, identityStep, traversal);
        assertEquals(traversal.asAdmin().getSteps().get(1), identityStep);
        assertEquals(3, traversal.asAdmin().getSteps().size());
    }

    @Test
    public void shouldChainTogetherStepsWithNextPreviousInALinkedListStructure() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(new IdentityStep(traversal));
        traversal.asAdmin().addStep(new HasStep(traversal));
        traversal.asAdmin().addStep(new LambdaFilterStep(traversal, traverser -> true));
        validateToyTraversal(traversal);
    }

    // Adding in reverse at index 0 and adding in order at increasing indices must produce the same chain.
    @Test
    public void shouldAddStepsCorrectly() {
        Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new LambdaFilterStep(traversal, traverser -> true));
        traversal.asAdmin().addStep(0, new HasStep(traversal));
        traversal.asAdmin().addStep(0, new IdentityStep(traversal));
        validateToyTraversal(traversal);

        traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(0, new IdentityStep(traversal));
        traversal.asAdmin().addStep(1, new HasStep(traversal));
        traversal.asAdmin().addStep(2, new LambdaFilterStep(traversal, traverser -> true));
        validateToyTraversal(traversal);
    }

    // Removal at the tail, head and middle must each re-link the remaining steps correctly.
    @Test
    public void shouldRemoveStepsCorrectly() {
        final Traversal.Admin traversal = new DefaultTraversal<>(EmptyGraph.instance());
        traversal.asAdmin().addStep(new IdentityStep(traversal));
        traversal.asAdmin().addStep(new HasStep(traversal));
        traversal.asAdmin().addStep(new LambdaFilterStep(traversal, traverser -> true));

        traversal.asAdmin().addStep(new PropertiesStep(traversal, PropertyType.VALUE, "marko"));
        traversal.asAdmin().removeStep(3);
        validateToyTraversal(traversal);

        traversal.asAdmin().addStep(0, new PropertiesStep(traversal, PropertyType.PROPERTY, "marko"));
        traversal.asAdmin().removeStep(0);
        validateToyTraversal(traversal);

        traversal.asAdmin().removeStep(1);
        traversal.asAdmin().addStep(1, new HasStep(traversal));
        validateToyTraversal(traversal);
    }

    // Asserts the canonical 3-step toy traversal (Identity -> Has -> LambdaFilter) and that
    // each step's previous/next links walk the chain and terminate in EmptyStep at both ends.
    private static void validateToyTraversal(final Traversal traversal) {
        assertEquals(traversal.asAdmin().getSteps().size(), 3);

        assertEquals(IdentityStep.class, traversal.asAdmin().getSteps().get(0).getClass());
        assertEquals(HasStep.class, traversal.asAdmin().getSteps().get(1).getClass());
        assertEquals(LambdaFilterStep.class, traversal.asAdmin().getSteps().get(2).getClass());

        // IDENTITY STEP
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(0)).getPreviousStep().getClass());
        assertEquals(HasStep.class, ((Step) traversal.asAdmin().getSteps().get(0)).getNextStep().getClass());
        assertEquals(LambdaFilterStep.class, ((Step) traversal.asAdmin().getSteps().get(0)).getNextStep().getNextStep().getClass());
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(0)).getNextStep().getNextStep().getNextStep().getClass());

        // HAS STEP
        assertEquals(IdentityStep.class, ((Step) traversal.asAdmin().getSteps().get(1)).getPreviousStep().getClass());
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(1)).getPreviousStep().getPreviousStep().getClass());
        assertEquals(LambdaFilterStep.class, ((Step) traversal.asAdmin().getSteps().get(1)).getNextStep().getClass());
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(1)).getNextStep().getNextStep().getClass());

        // FILTER STEP
        assertEquals(HasStep.class, ((Step) traversal.asAdmin().getSteps().get(2)).getPreviousStep().getClass());
        assertEquals(IdentityStep.class, ((Step) traversal.asAdmin().getSteps().get(2)).getPreviousStep().getPreviousStep().getClass());
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(2)).getPreviousStep().getPreviousStep().getPreviousStep().getClass());
        assertEquals(EmptyStep.class, ((Step) traversal.asAdmin().getSteps().get(2)).getNextStep().getClass());

        assertEquals(3, traversal.asAdmin().getSteps().size());
    }

    // getShortName truncates to the given width, replacing the overflow with "...".
    @Test
    public void shouldTruncateLongName() {
        Step s = Mockito.mock(Step.class);
        Mockito.when(s.toString()).thenReturn("0123456789");
        assertEquals("0123...", TraversalHelper.getShortName(s, 7));
    }

    @Test
    public void shouldIdentifyStarGraphTraversals() {
        assertThat(TraversalHelper.isLocalStarGraph(__.identity().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.id().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.out().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.label().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.bothE().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.values().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.properties().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.repeat(__.identity()).asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.repeat(__.has("name")).asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.out().repeat(__.identity()).asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.out().id().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.label().union(__.out(), in()).asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.label().union(__.out(), in()).id().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.coalesce(out("likes"), out("knows"), out("created")).groupCount().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.local(__.out()).groupCount().asAdmin()), is(true));
        assertThat(TraversalHelper.isLocalStarGraph(__.local(__.out()).groupCount().by(T.id).asAdmin()), is(true));
        // assertTrue(TraversalHelper.isLocalStarGraph(__.out().repeat(__.has("name")).asAdmin()));
        //
        assertFalse(TraversalHelper.isLocalStarGraph(__.out().label().asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.out().values().asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.out().valueMap().asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.repeat(__.out()).asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.repeat(__.has("name").out()).asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.repeat(__.has("name").union(__.out(), in())).asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.union(__.out(), in()).label().asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.union(__.out(), in().out()).asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.union(__.out(), __.out().union(in(), __.out())).asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.union(__.values(), __.out().union(in(), __.out())).out().asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.coalesce(out("likes"), out("knows"), out("created")).groupCount().by("name").asAdmin()));
        assertFalse(TraversalHelper.isLocalStarGraph(__.local(__.out()).groupCount().by("name").asAdmin()));
    }

    // Compares exact-class vs assignable-class retrieval by collecting the labels of matched steps.
    @Test
    public void shouldGetStepsByClass() {
        Set<String> labels = (Set) TraversalHelper.getStepsOfClass(VertexStep.class, __.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin())
                .stream()
                .flatMap(s -> s.getLabels().stream())
                .collect(Collectors.toSet());
        assertEquals(2, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("c"), is(true));
        //
        labels = (Set) TraversalHelper.getStepsOfAssignableClass(VertexStep.class, __.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin())
                .stream()
                .flatMap(s -> s.getLabels().stream())
                .collect(Collectors.toSet());
        assertEquals(2, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("c"), is(true));
        //
        labels = (Set) TraversalHelper.getStepsOfAssignableClass(FlatMapStep.class, __.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin())
                .stream()
                .flatMap(s -> s.getLabels().stream())
                .collect(Collectors.toSet());
        assertEquals(3, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("b"), is(true));
        assertThat(labels.contains("c"), is(true));
        //
        labels = (Set) TraversalHelper.getStepsOfClass(Step.class, __.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin())
                .stream()
                .flatMap(s -> s.getLabels().stream())
                .collect(Collectors.toSet());
        assertEquals(0, labels.size());
        //
        labels = (Set) TraversalHelper.getStepsOfAssignableClass(Step.class, __.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin())
                .stream()
                .flatMap(s -> s.getLabels().stream())
                .collect(Collectors.toSet());
        assertEquals(4, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("b"), is(true));
        assertThat(labels.contains("c"), is(true));
        assertThat(labels.contains("d"), is(true));
    }

    // getLabels must collect labels from the top level and from nested child traversals (repeat/local/by).
    @Test
    public void shouldGetLabels() {
        Set<String> labels = (Set) TraversalHelper.getLabels(__.out().as("a").values("name").as("b").in().as("c").groupCount().as("d").asAdmin());
        assertEquals(4, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("b"), is(true));
        assertThat(labels.contains("c"), is(true));
        assertThat(labels.contains("d"), is(true));

        labels = (Set) TraversalHelper.getLabels(__.out().as("a").repeat(__.out("name").as("b")).local(in().as("c")).as("d").groupCount().by(outE().as("e")).as("f").asAdmin());
        assertEquals(6, labels.size());
        assertThat(labels.contains("a"), is(true));
        assertThat(labels.contains("b"), is(true));
        assertThat(labels.contains("c"), is(true));
        assertThat(labels.contains("d"), is(true));
        assertThat(labels.contains("e"), is(true));
        assertThat(labels.contains("f"), is(true));
    }

    // anyStepRecursively must descend into child traversals (the simplePath inside repeat).
    @Test
    public void shouldFindStepsRecursively() {
        final Traversal<?,?> traversal = __.V().repeat(__.out().simplePath());
        assertThat(TraversalHelper.anyStepRecursively(s -> s instanceof PathFilterStep, traversal.asAdmin()), is(true));
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyNoTypes() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursively(traversal);
        assertEquals(0, steps.size());
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyOneType() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursively(traversal, VertexStep.class);
        assertEquals(3, steps.size());
        assertThat(steps.stream().allMatch(s -> s instanceof VertexStep), is(true));
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyMultipleTypes() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursively(traversal, VertexStep.class, FoldStep.class);
        assertEquals(4, steps.size());
        assertEquals(3, steps.stream().filter(s -> s instanceof VertexStep).count());
        assertEquals(1, steps.stream().filter(s -> s instanceof FoldStep).count());
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyFromDepthNoTypes() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursivelyFromDepth(traversal);
        assertEquals(0, steps.size());
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyFromDepthOneType() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursivelyFromDepth(traversal, VertexStep.class);
        assertEquals(3, steps.size());
        assertThat(steps.stream().allMatch(s -> s instanceof VertexStep), is(true));
    }

    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyFromDepthMultipleTypes() {
        final Traversal.Admin<?,?> traversal = __.V().repeat(__.out()).project("x").by(out().in().fold()).asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursivelyFromDepth(traversal, VertexStep.class, FoldStep.class);
        assertEquals(4, steps.size());
        assertEquals(3, steps.stream().filter(s -> s instanceof VertexStep).count());
        assertEquals(1, steps.stream().filter(s -> s instanceof FoldStep).count());
    }

    // The "FromDepth" variant must return deepest-nested steps first (c before b before d, etc.).
    @Test
    public void shouldGetStepsOfAssignableClassRecursivelyFromDepthEnsureOrder() {
        final Traversal.Admin<?,?> traversal = __.V().union(
                __.union(__.values("a"), __.union(__.values("b"), __.union(__.values("c"))), __.values("d")),
                __.values("e")).values("f").asAdmin();
        final List<Step<?,?>> steps = TraversalHelper.getStepsOfAssignableClassRecursivelyFromDepth(traversal, PropertiesStep.class);
        assertEquals(6, steps.size());
        assertEquals("c", ((PropertiesStep) steps.get(0)).getPropertyKeys()[0]);
        assertEquals("b", ((PropertiesStep) steps.get(1)).getPropertyKeys()[0]);
        assertEquals("d", ((PropertiesStep) steps.get(2)).getPropertyKeys()[0]);
        assertEquals("a", ((PropertiesStep) steps.get(3)).getPropertyKeys()[0]);
        assertEquals("e", ((PropertiesStep) steps.get(4)).getPropertyKeys()[0]);
        assertEquals("f", ((PropertiesStep) steps.get(5)).getPropertyKeys()[0]);
    }

    // Table-driven check: each traversal is paired (by index) with the PopInstruction set
    // getPopInstructions is expected to extract from its select/where/path steps.
    @Test
    public void shouldGetPopInstructions() {
        final List<Traversal.Admin<?,?>> traversals = new ArrayList<>();
        final List<Set<PopContaining.PopInstruction>> expectedResults = new ArrayList<>();
        ///
        traversals.add(__.V().has("person", "name", "marko").as("start").repeat(out().as("reached").select("start")).times(2).select("reached").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"start", Pop.last},
                new Object[]{"reached", Pop.last}
        ));
        ///
        traversals.add(__.V().select("vertex").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"vertex", Pop.last}
        ));
        ///
        traversals.add(__.V().out().as("a").repeat(union(out().select("a"), path().select(Pop.mixed, "b"))).select(Pop.first,"c").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"a", Pop.last},
                new Object[]{"b", Pop.mixed},
                new Object[]{"c", Pop.first}
        ));
        ///
        traversals.add(__.V().as("b").repeat(select("b").out().as("a")).times(2).select(Pop.first, "a").select(Pop.last, "a").project("bb").by(__.select(Pop.all, "a")).asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"b", Pop.last},
                new Object[]{"a", Pop.first},
                new Object[]{"a", Pop.all},
                new Object[]{"a", Pop.last}
        ));
        ///
        traversals.add(__.V("1").as("b").repeat(select("b").out().as("a")).times(2).path().as("c").by("name").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"b", Pop.last}
        ));
        ///
        traversals.add(__.V().union(out().as("a"), repeat(out().as("a")).emit()).select(Pop.last, "a").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"a", Pop.last}
        ));
        ///
        traversals.add(__.V().has("person", "name", "marko").as("start").repeat(out()).times(2).where(P.neq("start")).values("name").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"start", Pop.last}
        ));
        ///
        traversals.add(__.V().union(out(), repeat(out().as("a")).emit()).select(Pop.last, "a").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"a", Pop.last}
        ));
        ///
        traversals.add(__.V().as("a").union(path(), repeat(out().select(Pop.last, "a"))).asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"a", Pop.last}
        ));
        ///
        traversals.add(__.V().hasLabel("person").repeat(out("created")).emit().as("software").select("software").values("name", "lang").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"software", Pop.last}
        ));
        ///
        traversals.add(__.V().hasLabel("person").repeat(out("created").as("created_thing")).emit().as("final").select(Pop.mixed, "created_thing", "final").by("name").by("lang").asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"created_thing", Pop.mixed},
                new Object[]{"final", Pop.mixed}
        ));
        ///
        traversals.add(__.V().has("person", "name", "marko").as("start").repeat(out().as("path_element")).until(has("lang")).as("software").select("start", "path_element", "software").by("name").by("name").by(valueMap("name", "lang")).asAdmin());
        expectedResults.add(TestDataBuilder.createPopInstructionSet(
                new Object[]{"start", Pop.last},
                new Object[]{"path_element", Pop.last},
                new Object[]{"software", Pop.last}
        ));

        // Run all the tests
        for (int i = 0; i < traversals.size(); i++) {
            assertEquals(TraversalHelper.getPopInstructions(traversals.get(i)), expectedResults.get(i));
        }
    }

    // Asking getStepsOfClass for the GraphStepContract interface must resolve through the
    // contract registry to the concrete GraphStep* start step.
    @Test
    public void shouldUseContractRegistryInGetStepsOfClass() {
        // Build a traversal that will include a GraphStepPlaceholder as start (V()) and then some steps
        final Traversal.Admin<?,?> t = __.V().out().values("name").asAdmin();

        // Ensure that asking for GraphStepContract.class returns the start step
        final List<Step<?,?>> steps = (List) TraversalHelper.getStepsOfClass(org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStepContract.class, t);

        // There should be exactly one GraphStep* at the start
        assertEquals(1, steps.size());

        // And it should be one of the registered concrete classes
        final Class<?> sc = steps.get(0).getClass();
        final java.util.List<Class<? extends Step>> concretes = org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStepContract.CONCRETE_STEPS;
        assertThat(concretes.stream().anyMatch(c -> c.equals(sc)), is(true));
    }

    @Test
    public void shouldNotAffectNonRegisteredInterfaces() {
        // Use a random interface that is not a registered contract
        final Traversal.Admin<?,?> t = __.out().in().asAdmin();

        // Step is an interface but exact equality semantics should apply and thus return empty here
        final List<Step<?,?>> steps = (List) TraversalHelper.getStepsOfClass(Step.class, t);
        assertEquals(0, steps.size());
    }

    @Test
    public void hasOnlyShouldReturnTrueWhenAllStepsAreAssignable() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(
                Set.of(GraphStep.class, VertexStep.class, HasStep.class),
                __.V().out().has("name", "marko").asAdmin()), is(true));
    }

    @Test
    public void hasOnlyShouldReturnFalseWhenStepNotAssignable() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(
                Set.of(GraphStep.class, VertexStep.class),
                __.V().out().count().asAdmin()), is(false));
    }

    @Test
    public void hasOnlyShouldWorkWithInterfaceClasses() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(
                Set.of(GraphStepContract.class, VertexStepContract.class),
                __.V().out().asAdmin()), is(true));
    }

    @Test
    public void hasOnlyShouldWorkRecursivelyWithNestedTraversals() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(
                Set.of(GraphStep.class, RepeatStep.class, RepeatStep.RepeatEndStep.class, VertexStep.class, HasStep.class),
                __.V().repeat(__.out().has("name", "marko")).times(2).asAdmin()), is(true));
    }

    @Test
    public void hasOnlyShouldReturnFalseForNestedTraversalWithDisallowedStep() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(
                Set.of(GraphStep.class, RepeatStep.class, VertexStep.class),
                __.V().repeat(__.out().limit(1)).times(2).asAdmin()), is(false));
    }

    @Test
    public void hasOnlyShouldReturnTrueForEmptyTraversal() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(Set.of(IdentityStep.class),
                __.identity().asAdmin()), is(true));
    }

    @Test
    public void hasOnlyShouldReturnFalseForEmptyAllowedClasses() {
        assertThat(TraversalHelper.hasOnlyStepsOfAssignableClassesRecursively(Set.of(),
                __.V().out().asAdmin()), is(false));
    }
}
google/ExoPlayer
35,553
library/hls/src/main/java/com/google/android/exoplayer2/source/hls/playlist/DefaultHlsPlaylistTracker.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.source.hls.playlist; import static com.google.android.exoplayer2.util.Assertions.checkNotNull; import static com.google.android.exoplayer2.util.Util.castNonNull; import static java.lang.Math.max; import android.net.Uri; import android.os.Handler; import android.os.SystemClock; import androidx.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ParserException; import com.google.android.exoplayer2.source.LoadEventInfo; import com.google.android.exoplayer2.source.MediaLoadData; import com.google.android.exoplayer2.source.MediaSourceEventListener.EventDispatcher; import com.google.android.exoplayer2.source.hls.HlsDataSourceFactory; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Part; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.RenditionReport; import com.google.android.exoplayer2.source.hls.playlist.HlsMediaPlaylist.Segment; import com.google.android.exoplayer2.source.hls.playlist.HlsMultivariantPlaylist.Variant; import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.HttpDataSource; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy; import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy.LoadErrorInfo; import com.google.android.exoplayer2.upstream.Loader; 
import com.google.android.exoplayer2.upstream.Loader.LoadErrorAction; import com.google.android.exoplayer2.upstream.ParsingLoadable; import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Util; import com.google.common.collect.Iterables; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; /** * Default implementation for {@link HlsPlaylistTracker}. * * @deprecated com.google.android.exoplayer2 is deprecated. Please migrate to androidx.media3 (which * contains the same ExoPlayer code). See <a * href="https://developer.android.com/guide/topics/media/media3/getting-started/migration-guide">the * migration guide</a> for more details, including a script to help with the migration. */ @Deprecated public final class DefaultHlsPlaylistTracker implements HlsPlaylistTracker, Loader.Callback<ParsingLoadable<HlsPlaylist>> { /** Factory for {@link DefaultHlsPlaylistTracker} instances. */ public static final Factory FACTORY = DefaultHlsPlaylistTracker::new; /** * Default coefficient applied on the target duration of a playlist to determine the amount of * time after which an unchanging playlist is considered stuck. 
*/ public static final double DEFAULT_PLAYLIST_STUCK_TARGET_DURATION_COEFFICIENT = 3.5; private final HlsDataSourceFactory dataSourceFactory; private final HlsPlaylistParserFactory playlistParserFactory; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy; private final HashMap<Uri, MediaPlaylistBundle> playlistBundles; private final CopyOnWriteArrayList<PlaylistEventListener> listeners; private final double playlistStuckTargetDurationCoefficient; @Nullable private EventDispatcher eventDispatcher; @Nullable private Loader initialPlaylistLoader; @Nullable private Handler playlistRefreshHandler; @Nullable private PrimaryPlaylistListener primaryPlaylistListener; @Nullable private HlsMultivariantPlaylist multivariantPlaylist; @Nullable private Uri primaryMediaPlaylistUrl; @Nullable private HlsMediaPlaylist primaryMediaPlaylistSnapshot; private boolean isLive; private long initialStartTimeUs; /** * Creates an instance. * * @param dataSourceFactory A factory for {@link DataSource} instances. * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy}. * @param playlistParserFactory An {@link HlsPlaylistParserFactory}. */ public DefaultHlsPlaylistTracker( HlsDataSourceFactory dataSourceFactory, LoadErrorHandlingPolicy loadErrorHandlingPolicy, HlsPlaylistParserFactory playlistParserFactory) { this( dataSourceFactory, loadErrorHandlingPolicy, playlistParserFactory, DEFAULT_PLAYLIST_STUCK_TARGET_DURATION_COEFFICIENT); } /** * Creates an instance. * * @param dataSourceFactory A factory for {@link DataSource} instances. * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy}. * @param playlistParserFactory An {@link HlsPlaylistParserFactory}. * @param playlistStuckTargetDurationCoefficient A coefficient to apply to the target duration of * media playlists in order to determine that a non-changing playlist is stuck. Once a * playlist is deemed stuck, a {@link PlaylistStuckException} is thrown via {@link * #maybeThrowPlaylistRefreshError(Uri)}. 
*/ public DefaultHlsPlaylistTracker( HlsDataSourceFactory dataSourceFactory, LoadErrorHandlingPolicy loadErrorHandlingPolicy, HlsPlaylistParserFactory playlistParserFactory, double playlistStuckTargetDurationCoefficient) { this.dataSourceFactory = dataSourceFactory; this.playlistParserFactory = playlistParserFactory; this.loadErrorHandlingPolicy = loadErrorHandlingPolicy; this.playlistStuckTargetDurationCoefficient = playlistStuckTargetDurationCoefficient; listeners = new CopyOnWriteArrayList<>(); playlistBundles = new HashMap<>(); initialStartTimeUs = C.TIME_UNSET; } // HlsPlaylistTracker implementation. @Override public void start( Uri initialPlaylistUri, EventDispatcher eventDispatcher, PrimaryPlaylistListener primaryPlaylistListener) { this.playlistRefreshHandler = Util.createHandlerForCurrentLooper(); this.eventDispatcher = eventDispatcher; this.primaryPlaylistListener = primaryPlaylistListener; ParsingLoadable<HlsPlaylist> multivariantPlaylistLoadable = new ParsingLoadable<>( dataSourceFactory.createDataSource(C.DATA_TYPE_MANIFEST), initialPlaylistUri, C.DATA_TYPE_MANIFEST, playlistParserFactory.createPlaylistParser()); Assertions.checkState(initialPlaylistLoader == null); initialPlaylistLoader = new Loader("DefaultHlsPlaylistTracker:MultivariantPlaylist"); long elapsedRealtime = initialPlaylistLoader.startLoading( multivariantPlaylistLoadable, this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount( multivariantPlaylistLoadable.type)); eventDispatcher.loadStarted( new LoadEventInfo( multivariantPlaylistLoadable.loadTaskId, multivariantPlaylistLoadable.dataSpec, elapsedRealtime), multivariantPlaylistLoadable.type); } @Override public void stop() { primaryMediaPlaylistUrl = null; primaryMediaPlaylistSnapshot = null; multivariantPlaylist = null; initialStartTimeUs = C.TIME_UNSET; initialPlaylistLoader.release(); initialPlaylistLoader = null; for (MediaPlaylistBundle bundle : playlistBundles.values()) { bundle.release(); } 
playlistRefreshHandler.removeCallbacksAndMessages(null); playlistRefreshHandler = null; playlistBundles.clear(); } @Override public void addListener(PlaylistEventListener listener) { checkNotNull(listener); listeners.add(listener); } @Override public void removeListener(PlaylistEventListener listener) { listeners.remove(listener); } @Override @Nullable public HlsMultivariantPlaylist getMultivariantPlaylist() { return multivariantPlaylist; } @Override @Nullable public HlsMediaPlaylist getPlaylistSnapshot(Uri url, boolean isForPlayback) { @Nullable HlsMediaPlaylist snapshot = playlistBundles.get(url).getPlaylistSnapshot(); if (snapshot != null && isForPlayback) { maybeSetPrimaryUrl(url); } return snapshot; } @Override public long getInitialStartTimeUs() { return initialStartTimeUs; } @Override public boolean isSnapshotValid(Uri url) { return playlistBundles.get(url).isSnapshotValid(); } @Override public void maybeThrowPrimaryPlaylistRefreshError() throws IOException { if (initialPlaylistLoader != null) { initialPlaylistLoader.maybeThrowError(); } if (primaryMediaPlaylistUrl != null) { maybeThrowPlaylistRefreshError(primaryMediaPlaylistUrl); } } @Override public void maybeThrowPlaylistRefreshError(Uri url) throws IOException { playlistBundles.get(url).maybeThrowPlaylistRefreshError(); } @Override public void refreshPlaylist(Uri url) { playlistBundles.get(url).loadPlaylist(); } @Override public boolean isLive() { return isLive; } @Override public boolean excludeMediaPlaylist(Uri playlistUrl, long exclusionDurationMs) { @Nullable MediaPlaylistBundle bundle = playlistBundles.get(playlistUrl); if (bundle != null) { return !bundle.excludePlaylist(exclusionDurationMs); } return false; } // Loader.Callback implementation. 
@Override public void onLoadCompleted( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs) { HlsPlaylist result = loadable.getResult(); HlsMultivariantPlaylist multivariantPlaylist; boolean isMediaPlaylist = result instanceof HlsMediaPlaylist; if (isMediaPlaylist) { multivariantPlaylist = HlsMultivariantPlaylist.createSingleVariantMultivariantPlaylist(result.baseUri); } else /* result instanceof HlsMultivariantPlaylist */ { multivariantPlaylist = (HlsMultivariantPlaylist) result; } this.multivariantPlaylist = multivariantPlaylist; primaryMediaPlaylistUrl = multivariantPlaylist.variants.get(0).url; // Add a temporary playlist listener for loading the first primary playlist. listeners.add(new FirstPrimaryMediaPlaylistListener()); createBundles(multivariantPlaylist.mediaPlaylistUrls); LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); MediaPlaylistBundle primaryBundle = playlistBundles.get(primaryMediaPlaylistUrl); if (isMediaPlaylist) { // We don't need to load the playlist again. We can use the same result. 
primaryBundle.processLoadedPlaylist((HlsMediaPlaylist) result, loadEventInfo); } else { primaryBundle.loadPlaylist(); } loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); eventDispatcher.loadCompleted(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override public void onLoadCanceled( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); eventDispatcher.loadCanceled(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override public LoadErrorAction onLoadError( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs, IOException error, int errorCount) { LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); long retryDelayMs = loadErrorHandlingPolicy.getRetryDelayMsFor( new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount)); boolean isFatal = retryDelayMs == C.TIME_UNSET; eventDispatcher.loadError(loadEventInfo, loadable.type, error, isFatal); if (isFatal) { loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); } return isFatal ? Loader.DONT_RETRY_FATAL : Loader.createRetryAction(/* resetErrorCount= */ false, retryDelayMs); } // Internal methods. 
private boolean maybeSelectNewPrimaryUrl() { List<Variant> variants = multivariantPlaylist.variants; int variantsSize = variants.size(); long currentTimeMs = SystemClock.elapsedRealtime(); for (int i = 0; i < variantsSize; i++) { MediaPlaylistBundle bundle = checkNotNull(playlistBundles.get(variants.get(i).url)); if (currentTimeMs > bundle.excludeUntilMs) { primaryMediaPlaylistUrl = bundle.playlistUrl; bundle.loadPlaylistInternal(getRequestUriForPrimaryChange(primaryMediaPlaylistUrl)); return true; } } return false; } private void maybeSetPrimaryUrl(Uri url) { if (url.equals(primaryMediaPlaylistUrl) || !isVariantUrl(url) || (primaryMediaPlaylistSnapshot != null && primaryMediaPlaylistSnapshot.hasEndTag)) { // Ignore if the primary media playlist URL is unchanged, if the media playlist is not // referenced directly by a variant, or it the last primary snapshot contains an end tag. return; } primaryMediaPlaylistUrl = url; MediaPlaylistBundle newPrimaryBundle = playlistBundles.get(primaryMediaPlaylistUrl); @Nullable HlsMediaPlaylist newPrimarySnapshot = newPrimaryBundle.playlistSnapshot; if (newPrimarySnapshot != null && newPrimarySnapshot.hasEndTag) { primaryMediaPlaylistSnapshot = newPrimarySnapshot; primaryPlaylistListener.onPrimaryPlaylistRefreshed(newPrimarySnapshot); } else { // The snapshot for the new primary media playlist URL may be stale. Defer updating the // primary snapshot until after we've refreshed it. 
newPrimaryBundle.loadPlaylistInternal(getRequestUriForPrimaryChange(url)); } } private Uri getRequestUriForPrimaryChange(Uri newPrimaryPlaylistUri) { if (primaryMediaPlaylistSnapshot != null && primaryMediaPlaylistSnapshot.serverControl.canBlockReload) { @Nullable RenditionReport renditionReport = primaryMediaPlaylistSnapshot.renditionReports.get(newPrimaryPlaylistUri); if (renditionReport != null) { Uri.Builder uriBuilder = newPrimaryPlaylistUri.buildUpon(); uriBuilder.appendQueryParameter( MediaPlaylistBundle.BLOCK_MSN_PARAM, String.valueOf(renditionReport.lastMediaSequence)); if (renditionReport.lastPartIndex != C.INDEX_UNSET) { uriBuilder.appendQueryParameter( MediaPlaylistBundle.BLOCK_PART_PARAM, String.valueOf(renditionReport.lastPartIndex)); } return uriBuilder.build(); } } return newPrimaryPlaylistUri; } /** * Returns whether any of the variants in the multivariant playlist have the specified playlist * URL. */ private boolean isVariantUrl(Uri playlistUrl) { List<Variant> variants = multivariantPlaylist.variants; for (int i = 0; i < variants.size(); i++) { if (playlistUrl.equals(variants.get(i).url)) { return true; } } return false; } private void createBundles(List<Uri> urls) { int listSize = urls.size(); for (int i = 0; i < listSize; i++) { Uri url = urls.get(i); MediaPlaylistBundle bundle = new MediaPlaylistBundle(url); playlistBundles.put(url, bundle); } } /** * Called by the bundles when a snapshot changes. * * @param url The url of the playlist. * @param newSnapshot The new snapshot. */ private void onPlaylistUpdated(Uri url, HlsMediaPlaylist newSnapshot) { if (url.equals(primaryMediaPlaylistUrl)) { if (primaryMediaPlaylistSnapshot == null) { // This is the first primary url snapshot. 
isLive = !newSnapshot.hasEndTag; initialStartTimeUs = newSnapshot.startTimeUs; } primaryMediaPlaylistSnapshot = newSnapshot; primaryPlaylistListener.onPrimaryPlaylistRefreshed(newSnapshot); } for (PlaylistEventListener listener : listeners) { listener.onPlaylistChanged(); } } private boolean notifyPlaylistError( Uri playlistUrl, LoadErrorInfo loadErrorInfo, boolean forceRetry) { boolean anyExclusionFailed = false; for (PlaylistEventListener listener : listeners) { anyExclusionFailed |= !listener.onPlaylistError(playlistUrl, loadErrorInfo, forceRetry); } return anyExclusionFailed; } private HlsMediaPlaylist getLatestPlaylistSnapshot( @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (!loadedPlaylist.isNewerThan(oldPlaylist)) { if (loadedPlaylist.hasEndTag) { // If the loaded playlist has an end tag but is not newer than the old playlist then we have // an inconsistent state. This is typically caused by the server incorrectly resetting the // media sequence when appending the end tag. We resolve this case as best we can by // returning the old playlist with the end tag appended. return oldPlaylist.copyWithEndTag(); } else { return oldPlaylist; } } long startTimeUs = getLoadedPlaylistStartTimeUs(oldPlaylist, loadedPlaylist); int discontinuitySequence = getLoadedPlaylistDiscontinuitySequence(oldPlaylist, loadedPlaylist); return loadedPlaylist.copyWith(startTimeUs, discontinuitySequence); } private long getLoadedPlaylistStartTimeUs( @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (loadedPlaylist.hasProgramDateTime) { return loadedPlaylist.startTimeUs; } long primarySnapshotStartTimeUs = primaryMediaPlaylistSnapshot != null ? 
primaryMediaPlaylistSnapshot.startTimeUs : 0; if (oldPlaylist == null) { return primarySnapshotStartTimeUs; } int oldPlaylistSize = oldPlaylist.segments.size(); Segment firstOldOverlappingSegment = getFirstOldOverlappingSegment(oldPlaylist, loadedPlaylist); if (firstOldOverlappingSegment != null) { return oldPlaylist.startTimeUs + firstOldOverlappingSegment.relativeStartTimeUs; } else if (oldPlaylistSize == loadedPlaylist.mediaSequence - oldPlaylist.mediaSequence) { return oldPlaylist.getEndTimeUs(); } else { // No segments overlap, we assume the new playlist start coincides with the primary playlist. return primarySnapshotStartTimeUs; } } private int getLoadedPlaylistDiscontinuitySequence( @Nullable HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { if (loadedPlaylist.hasDiscontinuitySequence) { return loadedPlaylist.discontinuitySequence; } // TODO: Improve cross-playlist discontinuity adjustment. int primaryUrlDiscontinuitySequence = primaryMediaPlaylistSnapshot != null ? primaryMediaPlaylistSnapshot.discontinuitySequence : 0; if (oldPlaylist == null) { return primaryUrlDiscontinuitySequence; } Segment firstOldOverlappingSegment = getFirstOldOverlappingSegment(oldPlaylist, loadedPlaylist); if (firstOldOverlappingSegment != null) { return oldPlaylist.discontinuitySequence + firstOldOverlappingSegment.relativeDiscontinuitySequence - loadedPlaylist.segments.get(0).relativeDiscontinuitySequence; } return primaryUrlDiscontinuitySequence; } private static Segment getFirstOldOverlappingSegment( HlsMediaPlaylist oldPlaylist, HlsMediaPlaylist loadedPlaylist) { int mediaSequenceOffset = (int) (loadedPlaylist.mediaSequence - oldPlaylist.mediaSequence); List<Segment> oldSegments = oldPlaylist.segments; return mediaSequenceOffset < oldSegments.size() ? oldSegments.get(mediaSequenceOffset) : null; } /** Holds all information related to a specific Media Playlist. 
*/ private final class MediaPlaylistBundle implements Loader.Callback<ParsingLoadable<HlsPlaylist>> { private static final String BLOCK_MSN_PARAM = "_HLS_msn"; private static final String BLOCK_PART_PARAM = "_HLS_part"; private static final String SKIP_PARAM = "_HLS_skip"; private final Uri playlistUrl; private final Loader mediaPlaylistLoader; private final DataSource mediaPlaylistDataSource; @Nullable private HlsMediaPlaylist playlistSnapshot; private long lastSnapshotLoadMs; private long lastSnapshotChangeMs; private long earliestNextLoadTimeMs; private long excludeUntilMs; private boolean loadPending; @Nullable private IOException playlistError; public MediaPlaylistBundle(Uri playlistUrl) { this.playlistUrl = playlistUrl; mediaPlaylistLoader = new Loader("DefaultHlsPlaylistTracker:MediaPlaylist"); mediaPlaylistDataSource = dataSourceFactory.createDataSource(C.DATA_TYPE_MANIFEST); } @Nullable public HlsMediaPlaylist getPlaylistSnapshot() { return playlistSnapshot; } public boolean isSnapshotValid() { if (playlistSnapshot == null) { return false; } long currentTimeMs = SystemClock.elapsedRealtime(); long snapshotValidityDurationMs = max(30000, Util.usToMs(playlistSnapshot.durationUs)); return playlistSnapshot.hasEndTag || playlistSnapshot.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_EVENT || playlistSnapshot.playlistType == HlsMediaPlaylist.PLAYLIST_TYPE_VOD || lastSnapshotLoadMs + snapshotValidityDurationMs > currentTimeMs; } public void loadPlaylist() { loadPlaylistInternal(playlistUrl); } public void maybeThrowPlaylistRefreshError() throws IOException { mediaPlaylistLoader.maybeThrowError(); if (playlistError != null) { throw playlistError; } } public void release() { mediaPlaylistLoader.release(); } // Loader.Callback implementation. 
@Override public void onLoadCompleted( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs) { @Nullable HlsPlaylist result = loadable.getResult(); LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); if (result instanceof HlsMediaPlaylist) { processLoadedPlaylist((HlsMediaPlaylist) result, loadEventInfo); eventDispatcher.loadCompleted(loadEventInfo, C.DATA_TYPE_MANIFEST); } else { playlistError = ParserException.createForMalformedManifest( "Loaded playlist has unexpected type.", /* cause= */ null); eventDispatcher.loadError( loadEventInfo, C.DATA_TYPE_MANIFEST, playlistError, /* wasCanceled= */ true); } loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); } @Override public void onLoadCanceled( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs, boolean released) { LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); eventDispatcher.loadCanceled(loadEventInfo, C.DATA_TYPE_MANIFEST); } @Override public LoadErrorAction onLoadError( ParsingLoadable<HlsPlaylist> loadable, long elapsedRealtimeMs, long loadDurationMs, IOException error, int errorCount) { LoadEventInfo loadEventInfo = new LoadEventInfo( loadable.loadTaskId, loadable.dataSpec, loadable.getUri(), loadable.getResponseHeaders(), elapsedRealtimeMs, loadDurationMs, loadable.bytesLoaded()); boolean isBlockingRequest = loadable.getUri().getQueryParameter(BLOCK_MSN_PARAM) != null; boolean deltaUpdateFailed = error instanceof HlsPlaylistParser.DeltaUpdateException; if (isBlockingRequest || deltaUpdateFailed) { int responseCode = Integer.MAX_VALUE; if (error instanceof 
HttpDataSource.InvalidResponseCodeException) { responseCode = ((HttpDataSource.InvalidResponseCodeException) error).responseCode; } if (deltaUpdateFailed || responseCode == 400 || responseCode == 503) { // Intercept failed delta updates and blocking requests producing a Bad Request (400) and // Service Unavailable (503). In such cases, force a full, non-blocking request (see RFC // 8216, section 6.2.5.2 and 6.3.7). earliestNextLoadTimeMs = SystemClock.elapsedRealtime(); loadPlaylist(); castNonNull(eventDispatcher) .loadError(loadEventInfo, loadable.type, error, /* wasCanceled= */ true); return Loader.DONT_RETRY; } } MediaLoadData mediaLoadData = new MediaLoadData(loadable.type); LoadErrorInfo loadErrorInfo = new LoadErrorInfo(loadEventInfo, mediaLoadData, error, errorCount); boolean exclusionFailed = notifyPlaylistError(playlistUrl, loadErrorInfo, /* forceRetry= */ false); LoadErrorAction loadErrorAction; if (exclusionFailed) { long retryDelay = loadErrorHandlingPolicy.getRetryDelayMsFor(loadErrorInfo); loadErrorAction = retryDelay != C.TIME_UNSET ? Loader.createRetryAction(false, retryDelay) : Loader.DONT_RETRY_FATAL; } else { loadErrorAction = Loader.DONT_RETRY; } boolean wasCanceled = !loadErrorAction.isRetry(); eventDispatcher.loadError(loadEventInfo, loadable.type, error, wasCanceled); if (wasCanceled) { loadErrorHandlingPolicy.onLoadTaskConcluded(loadable.loadTaskId); } return loadErrorAction; } // Internal methods. private void loadPlaylistInternal(Uri playlistRequestUri) { excludeUntilMs = 0; if (loadPending || mediaPlaylistLoader.isLoading() || mediaPlaylistLoader.hasFatalError()) { // Load already pending, in progress, or a fatal error has been encountered. Do nothing. 
return; } long currentTimeMs = SystemClock.elapsedRealtime(); if (currentTimeMs < earliestNextLoadTimeMs) { loadPending = true; playlistRefreshHandler.postDelayed( () -> { loadPending = false; loadPlaylistImmediately(playlistRequestUri); }, earliestNextLoadTimeMs - currentTimeMs); } else { loadPlaylistImmediately(playlistRequestUri); } } private void loadPlaylistImmediately(Uri playlistRequestUri) { ParsingLoadable.Parser<HlsPlaylist> mediaPlaylistParser = playlistParserFactory.createPlaylistParser(multivariantPlaylist, playlistSnapshot); ParsingLoadable<HlsPlaylist> mediaPlaylistLoadable = new ParsingLoadable<>( mediaPlaylistDataSource, playlistRequestUri, C.DATA_TYPE_MANIFEST, mediaPlaylistParser); long elapsedRealtime = mediaPlaylistLoader.startLoading( mediaPlaylistLoadable, /* callback= */ this, loadErrorHandlingPolicy.getMinimumLoadableRetryCount(mediaPlaylistLoadable.type)); eventDispatcher.loadStarted( new LoadEventInfo( mediaPlaylistLoadable.loadTaskId, mediaPlaylistLoadable.dataSpec, elapsedRealtime), mediaPlaylistLoadable.type); } private void processLoadedPlaylist( HlsMediaPlaylist loadedPlaylist, LoadEventInfo loadEventInfo) { @Nullable HlsMediaPlaylist oldPlaylist = playlistSnapshot; long currentTimeMs = SystemClock.elapsedRealtime(); lastSnapshotLoadMs = currentTimeMs; playlistSnapshot = getLatestPlaylistSnapshot(oldPlaylist, loadedPlaylist); if (playlistSnapshot != oldPlaylist) { playlistError = null; lastSnapshotChangeMs = currentTimeMs; onPlaylistUpdated(playlistUrl, playlistSnapshot); } else if (!playlistSnapshot.hasEndTag) { boolean forceRetry = false; @Nullable IOException playlistError = null; if (loadedPlaylist.mediaSequence + loadedPlaylist.segments.size() < playlistSnapshot.mediaSequence) { // TODO: Allow customization of playlist resets handling. // The media sequence jumped backwards. The server has probably reset. We do not try // excluding in this case. 
forceRetry = true; playlistError = new PlaylistResetException(playlistUrl); } else if (currentTimeMs - lastSnapshotChangeMs > Util.usToMs(playlistSnapshot.targetDurationUs) * playlistStuckTargetDurationCoefficient) { // TODO: Allow customization of stuck playlists handling. playlistError = new PlaylistStuckException(playlistUrl); } if (playlistError != null) { this.playlistError = playlistError; notifyPlaylistError( playlistUrl, new LoadErrorInfo( loadEventInfo, new MediaLoadData(C.DATA_TYPE_MANIFEST), playlistError, /* errorCount= */ 1), forceRetry); } } long durationUntilNextLoadUs = 0L; if (!playlistSnapshot.serverControl.canBlockReload) { // If blocking requests are not supported, do not allow the playlist to load again within // the target duration if we obtained a new snapshot, or half the target duration otherwise. durationUntilNextLoadUs = playlistSnapshot != oldPlaylist ? playlistSnapshot.targetDurationUs : (playlistSnapshot.targetDurationUs / 2); } earliestNextLoadTimeMs = currentTimeMs + Util.usToMs(durationUntilNextLoadUs); // Schedule a load if this is the primary playlist or a playlist of a low-latency stream and // it doesn't have an end tag. Else the next load will be scheduled when refreshPlaylist is // called, or when this playlist becomes the primary. 
boolean scheduleLoad = playlistSnapshot.partTargetDurationUs != C.TIME_UNSET || playlistUrl.equals(primaryMediaPlaylistUrl); if (scheduleLoad && !playlistSnapshot.hasEndTag) { loadPlaylistInternal(getMediaPlaylistUriForReload()); } } private Uri getMediaPlaylistUriForReload() { if (playlistSnapshot == null || (playlistSnapshot.serverControl.skipUntilUs == C.TIME_UNSET && !playlistSnapshot.serverControl.canBlockReload)) { return playlistUrl; } Uri.Builder uriBuilder = playlistUrl.buildUpon(); if (playlistSnapshot.serverControl.canBlockReload) { long targetMediaSequence = playlistSnapshot.mediaSequence + playlistSnapshot.segments.size(); uriBuilder.appendQueryParameter(BLOCK_MSN_PARAM, String.valueOf(targetMediaSequence)); if (playlistSnapshot.partTargetDurationUs != C.TIME_UNSET) { List<Part> trailingParts = playlistSnapshot.trailingParts; int targetPartIndex = trailingParts.size(); if (!trailingParts.isEmpty() && Iterables.getLast(trailingParts).isPreload) { // Ignore the preload part. targetPartIndex--; } uriBuilder.appendQueryParameter(BLOCK_PART_PARAM, String.valueOf(targetPartIndex)); } } if (playlistSnapshot.serverControl.skipUntilUs != C.TIME_UNSET) { uriBuilder.appendQueryParameter( SKIP_PARAM, playlistSnapshot.serverControl.canSkipDateRanges ? "v2" : "YES"); } return uriBuilder.build(); } /** * Excludes the playlist. * * @param exclusionDurationMs The number of milliseconds for which the playlist should be * excluded. * @return Whether the playlist is the primary, despite being excluded. */ private boolean excludePlaylist(long exclusionDurationMs) { excludeUntilMs = SystemClock.elapsedRealtime() + exclusionDurationMs; return playlistUrl.equals(primaryMediaPlaylistUrl) && !maybeSelectNewPrimaryUrl(); } } /** * Takes care of handling load errors of the first media playlist and applies exclusion according * to the {@link LoadErrorHandlingPolicy} before the first media period has been created and * prepared. 
*/ private class FirstPrimaryMediaPlaylistListener implements PlaylistEventListener { @Override public void onPlaylistChanged() { // Remove the temporary playlist listener that is waiting for the first playlist only. listeners.remove(this); } @Override public boolean onPlaylistError(Uri url, LoadErrorInfo loadErrorInfo, boolean forceRetry) { if (primaryMediaPlaylistSnapshot == null) { long nowMs = SystemClock.elapsedRealtime(); int variantExclusionCounter = 0; List<Variant> variants = castNonNull(multivariantPlaylist).variants; for (int i = 0; i < variants.size(); i++) { @Nullable MediaPlaylistBundle mediaPlaylistBundle = playlistBundles.get(variants.get(i).url); if (mediaPlaylistBundle != null && nowMs < mediaPlaylistBundle.excludeUntilMs) { variantExclusionCounter++; } } LoadErrorHandlingPolicy.FallbackOptions fallbackOptions = new LoadErrorHandlingPolicy.FallbackOptions( /* numberOfLocations= */ 1, /* numberOfExcludedLocations= */ 0, /* numberOfTracks= */ multivariantPlaylist.variants.size(), /* numberOfExcludedTracks= */ variantExclusionCounter); @Nullable LoadErrorHandlingPolicy.FallbackSelection fallbackSelection = loadErrorHandlingPolicy.getFallbackSelectionFor(fallbackOptions, loadErrorInfo); if (fallbackSelection != null && fallbackSelection.type == LoadErrorHandlingPolicy.FALLBACK_TYPE_TRACK) { @Nullable MediaPlaylistBundle mediaPlaylistBundle = playlistBundles.get(url); if (mediaPlaylistBundle != null) { mediaPlaylistBundle.excludePlaylist(fallbackSelection.exclusionDurationMs); } } } return false; } } }
apache/cxf
35,449
rt/ws/rm/src/main/java/org/apache/cxf/ws/rm/soap/RetransmissionQueueImpl.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.cxf.ws.rm.soap;

import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import java.util.TimerTask;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;

import org.apache.cxf.Bus;
import org.apache.cxf.binding.soap.SoapHeader;
import org.apache.cxf.binding.soap.SoapMessage;
import org.apache.cxf.binding.soap.SoapVersion;
import org.apache.cxf.binding.soap.interceptor.SoapOutInterceptor;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.endpoint.DeferredConduitSelector;
import org.apache.cxf.endpoint.Endpoint;
import org.apache.cxf.helpers.DOMUtils;
import org.apache.cxf.interceptor.AbstractOutDatabindingInterceptor;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.interceptor.Interceptor;
import org.apache.cxf.io.CachedOutputStream;
import org.apache.cxf.io.CachedOutputStreamCallback;
import org.apache.cxf.io.WriteOnCloseOutputStream;
import org.apache.cxf.message.Message;
import org.apache.cxf.message.MessageUtils;
import org.apache.cxf.phase.Phase;
import org.apache.cxf.phase.PhaseChainCache;
import org.apache.cxf.phase.PhaseInterceptor;
import org.apache.cxf.phase.PhaseInterceptorChain;
import org.apache.cxf.phase.PhaseManager;
import org.apache.cxf.service.model.EndpointInfo;
import org.apache.cxf.staxutils.PartialXMLStreamReader;
import org.apache.cxf.staxutils.StaxUtils;
import org.apache.cxf.staxutils.W3CDOMStreamWriter;
import org.apache.cxf.transport.Conduit;
import org.apache.cxf.transport.MessageObserver;
import org.apache.cxf.workqueue.SynchronousExecutor;
import org.apache.cxf.ws.addressing.AddressingProperties;
import org.apache.cxf.ws.addressing.AttributedURIType;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
import org.apache.cxf.ws.addressing.soap.MAPCodec;
import org.apache.cxf.ws.policy.AssertionInfo;
import org.apache.cxf.ws.policy.builder.jaxb.JaxbAssertion;
import org.apache.cxf.ws.rm.ProtocolVariation;
import org.apache.cxf.ws.rm.RMCaptureOutInterceptor;
import org.apache.cxf.ws.rm.RMConfiguration;
import org.apache.cxf.ws.rm.RMContextUtils;
import org.apache.cxf.ws.rm.RMEndpoint;
import org.apache.cxf.ws.rm.RMException;
import org.apache.cxf.ws.rm.RMManager;
import org.apache.cxf.ws.rm.RMMessageConstants;
import org.apache.cxf.ws.rm.RMProperties;
import org.apache.cxf.ws.rm.RMUtils;
import org.apache.cxf.ws.rm.RetransmissionQueue;
import org.apache.cxf.ws.rm.RetryStatus;
import org.apache.cxf.ws.rm.SourceSequence;
import org.apache.cxf.ws.rm.manager.RetryPolicyType;
import org.apache.cxf.ws.rm.persistence.RMStore;
import org.apache.cxf.ws.rm.v200702.Identifier;
import org.apache.cxf.ws.rm.v200702.SequenceType;
import org.apache.cxf.ws.rmp.v200502.RMAssertion;

/**
 * WS-ReliableMessaging retransmission queue. Tracks every outgoing message that has
 * not yet been acknowledged (as a {@link ResendCandidate}), schedules resend attempts
 * via the {@link RMManager}'s timer with an optional exponential backoff, and purges
 * candidates once acknowledgments arrive. All access to the candidate maps is guarded
 * by this instance's monitor.
 */
public class RetransmissionQueueImpl implements RetransmissionQueue {

    private static final Logger LOG = LogUtils.getL7dLogger(RetransmissionQueueImpl.class);

    // Unacked candidates keyed by sequence identifier value. A sequence's candidates
    // live in exactly one of these two maps depending on whether it is suspended.
    private final Map<String, List<ResendCandidate>> candidates = new HashMap<>();
    private final Map<String, List<ResendCandidate>> suspendedCandidates = new HashMap<>();
    private Resender resender;
    private RMManager manager;
    // Running total across all sequences; kept in sync with the candidate lists.
    private int unacknowledgedCount;

    public RetransmissionQueueImpl(RMManager m) {
        manager = m;
    }

    public RMManager getManager() {
        return manager;
    }

    public void setManager(RMManager m) {
        manager = m;
    }

    /**
     * Registers an outgoing message as unacknowledged so it becomes eligible for resends.
     *
     * @param message the message to track
     */
    public void addUnacknowledged(Message message) {
        cacheUnacknowledged(message);
    }

    /**
     * @param seq the sequence under consideration
     * @return the number of unacknowledged messages for that sequence
     */
    public synchronized int countUnacknowledged(SourceSequence seq) {
        List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
        return sequenceCandidates == null ? 0 : sequenceCandidates.size();
    }

    public int countUnacknowledged() {
        return unacknowledgedCount;
    }

    /**
     * @return true if there are no unacknowledged messages in the queue
     */
    public boolean isEmpty() {
        return getUnacknowledged().isEmpty();
    }

    /**
     * Purge all candidates for the given sequence that have been acknowledged.
     *
     * @param seq the sequence object.
     */
    public void purgeAcknowledged(SourceSequence seq) {
        purgeCandidates(seq, false);
    }

    /**
     * Purge all candidates for the given sequence. This method is used to
     * terminate the sequence by force and release the resource associated
     * with the sequence.
     *
     * @param seq the sequence object.
     */
    public void purgeAll(SourceSequence seq) {
        purgeCandidates(seq, true);
    }

    /**
     * Removes candidates for the sequence; with {@code any == false} only those the
     * sequence reports as acknowledged. Store cleanup and acknowledgment callbacks are
     * performed outside the monitor to avoid holding the lock during I/O.
     *
     * @param seq the sequence whose candidates are purged
     * @param any true to purge every candidate regardless of acknowledgment state
     */
    private void purgeCandidates(SourceSequence seq, boolean any) {
        Collection<Long> purged = new ArrayList<>();
        Collection<ResendCandidate> resends = new ArrayList<>();
        Identifier sid = seq.getIdentifier();
        synchronized (this) {
            LOG.fine("Start purging resend candidates.");
            List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
            if (null != sequenceCandidates) {
                // iterate backwards so removal by index stays valid
                for (int i = sequenceCandidates.size() - 1; i >= 0; i--) {
                    ResendCandidate candidate = sequenceCandidates.get(i);
                    long m = candidate.getNumber();
                    if (any || seq.isAcknowledged(m)) {
                        sequenceCandidates.remove(i);
                        candidate.resolved();
                        unacknowledgedCount--;
                        purged.add(m);
                        resends.add(candidate);
                    }
                }
                if (sequenceCandidates.isEmpty()) {
                    candidates.remove(sid.getValue());
                }
            }
            LOG.fine("Completed purging resend candidates.");
        }
        if (!purged.isEmpty()) {
            RMStore store = manager.getStore();
            if (null != store) {
                store.removeMessages(sid, purged, true);
            }
            RMEndpoint rmEndpoint = seq.getSource().getReliableEndpoint();
            for (ResendCandidate resend: resends) {
                rmEndpoint.handleAcknowledgment(sid.getValue(), resend.getNumber(), resend.getMessage());
            }
        }
    }

    /**
     * @param seq the sequence under consideration
     * @return the message numbers still awaiting acknowledgment for that sequence
     */
    public List<Long> getUnacknowledgedMessageNumbers(SourceSequence seq) {
        List<Long> unacknowledged = new ArrayList<>();
        List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
        if (null != sequenceCandidates) {
            for (int i = 0; i < sequenceCandidates.size(); i++) {
                ResendCandidate candidate = sequenceCandidates.get(i);
                unacknowledged.add(candidate.getNumber());
            }
        }
        return unacknowledged;
    }

    /**
     * @param seq the sequence under consideration
     * @param num the message number within the sequence
     * @return the retry status for that message, or null if it is not being tracked
     */
    public RetryStatus getRetransmissionStatus(SourceSequence seq, long num) {
        List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
        if (null != sequenceCandidates) {
            for (int i = 0; i < sequenceCandidates.size(); i++) {
                ResendCandidate candidate = sequenceCandidates.get(i);
                if (num == candidate.getNumber()) {
                    return candidate;
                }
            }
        }
        return null;
    }

    /**
     * @param seq the sequence under consideration
     * @return a map of message number to retry status for all tracked messages
     */
    public Map<Long, RetryStatus> getRetransmissionStatuses(SourceSequence seq) {
        Map<Long, RetryStatus> cp = new HashMap<>();
        List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
        if (null != sequenceCandidates) {
            for (int i = 0; i < sequenceCandidates.size(); i++) {
                ResendCandidate candidate = sequenceCandidates.get(i);
                cp.put(candidate.getNumber(), candidate);
            }
        }
        return cp;
    }

    /**
     * Initiate resends.
     */
    public void start() {
        if (null != resender) {
            return;
        }
        LOG.fine("Starting retransmission queue");

        // setup resender
        resender = getDefaultResender();
    }

    /**
     * Stops resending messages for the specified source sequence.
     */
    public void stop(SourceSequence seq) {
        synchronized (this) {
            List<ResendCandidate> sequenceCandidates = getSequenceCandidates(seq);
            if (null != sequenceCandidates) {
                for (int i = sequenceCandidates.size() - 1; i >= 0; i--) {
                    ResendCandidate candidate = sequenceCandidates.get(i);
                    candidate.cancel();
                }
                LOG.log(Level.FINE, "Cancelled resends for sequence {0}.", seq.getIdentifier().getValue());
            }
        }
    }

    // No global shutdown work is required; per-sequence stop(SourceSequence) cancels timers.
    void stop() {
    }

    /**
     * Suspends resends for the sequence: its candidates move to the suspended map and
     * each candidate's pending timer task is cancelled.
     *
     * @param seq the sequence to suspend
     */
    public void suspend(SourceSequence seq) {
        synchronized (this) {
            String key = seq.getIdentifier().getValue();
            List<ResendCandidate> sequenceCandidates = candidates.remove(key);
            if (null != sequenceCandidates) {
                for (int i = sequenceCandidates.size() - 1; i >= 0; i--) {
                    ResendCandidate candidate = sequenceCandidates.get(i);
                    candidate.suspend();
                }
                suspendedCandidates.put(key, sequenceCandidates);
                LOG.log(Level.FINE, "Suspended resends for sequence {0}.", key);
            }
        }
    }

    /**
     * Resumes resends for a previously suspended sequence: candidates move back to the
     * active map and resend scheduling restarts.
     *
     * @param seq the sequence to resume
     */
    public void resume(SourceSequence seq) {
        synchronized (this) {
            String key = seq.getIdentifier().getValue();
            List<ResendCandidate> sequenceCandidates = suspendedCandidates.remove(key);
            if (null != sequenceCandidates) {
                for (int i = 0; i < sequenceCandidates.size(); i++) {
                    ResendCandidate candidate = sequenceCandidates.get(i);
                    candidate.resume();
                }
                candidates.put(key, sequenceCandidates);
                LOG.log(Level.FINE, "Resumed resends for sequence {0}.", key);
            }
        }
    }

    /**
     * @return the exponential backoff
     */
    protected int getExponentialBackoff() {
        return DEFAULT_EXPONENTIAL_BACKOFF;
    }

    /**
     * @param message the message context
     * @return a ResendCandidate
     */
    protected ResendCandidate createResendCandidate(Message message) {
        return new ResendCandidate(message);
    }

    /**
     * Accepts a new resend candidate.
     *
     * @param message the message object.
     * @return ResendCandidate
     */
    protected ResendCandidate cacheUnacknowledged(Message message) {
        RMProperties rmps = RMContextUtils.retrieveRMProperties(message, true);
        SequenceType st = rmps.getSequence();
        Identifier sid = st.getIdentifier();
        String key = sid.getValue();

        final ResendCandidate candidate;

        synchronized (this) {
            List<ResendCandidate> sequenceCandidates = getSequenceCandidates(key);
            if (null == sequenceCandidates) {
                sequenceCandidates = new ArrayList<>();
                candidates.put(key, sequenceCandidates);
            }
            candidate = createResendCandidate(message);
            // a candidate added while its sequence is suspended starts suspended too
            if (isSequenceSuspended(key)) {
                candidate.suspend();
            }
            sequenceCandidates.add(candidate);
            unacknowledgedCount++;
        }
        LOG.fine("Cached unacknowledged message.");
        try {
            RMEndpoint rme = manager.getReliableEndpoint(message);
            rme.handleAccept(key, st.getMessageNumber(), message);
        } catch (RMException e) {
            LOG.log(Level.WARNING, "Could not find reliable endpoint for message");
        }
        return candidate;
    }

    /**
     * @return a map relating sequence ID to a lists of un-acknowledged messages
     *         for that sequence
     */
    protected Map<String, List<ResendCandidate>> getUnacknowledged() {
        return candidates;
    }

    /**
     * @param seq the sequence under consideration
     * @return the list of resend candidates for that sequence
     * @pre called with mutex held
     */
    protected List<ResendCandidate> getSequenceCandidates(SourceSequence seq) {
        return getSequenceCandidates(seq.getIdentifier().getValue());
    }

    /**
     * @param key the sequence identifier under consideration
     * @return the list of resend candidates for that sequence
     * @pre called with mutex held
     */
    protected List<ResendCandidate> getSequenceCandidates(String key) {
        // check the active map first, then fall back to the suspended one
        List<ResendCandidate> sc = candidates.get(key);
        if (null == sc) {
            sc = suspendedCandidates.get(key);
        }
        return sc;
    }

    /**
     * @param key the sequence identifier under consideration
     * @return true if the sequence is currently suspended; false otherwise
     * @pre called with mutex held
     */
    protected boolean isSequenceSuspended(String key) {
        return suspendedCandidates.containsKey(key);
    }

    /**
     * Represents a candidate for resend, i.e. an unacked outgoing message.
     */
    protected class ResendCandidate implements Runnable, RetryStatus {
        private Message message;
        private long number;
        private Date next;
        private TimerTask nextTask;
        private int retries;
        private int maxRetries;
        private long nextInterval;
        private long backoff;
        private boolean pending;
        private boolean suspended;
        private boolean includeAckRequested;

        /**
         * @param m the unacked message
         */
        protected ResendCandidate(Message m) {
            message = m;
            retries = 0;
            RMConfiguration cfg = manager.getEffectiveConfiguration(message);
            long baseRetransmissionInterval = cfg.getBaseRetransmissionInterval().longValue();
            backoff = cfg.isExponentialBackoff() ? RetransmissionQueue.DEFAULT_EXPONENTIAL_BACKOFF : 1;
            next = new Date(System.currentTimeMillis() + baseRetransmissionInterval);
            nextInterval = baseRetransmissionInterval * backoff;
            RetryPolicyType rmrp = null != manager.getSourcePolicy()
                ? manager.getSourcePolicy().getRetryPolicy() : null;
            // -1 means unlimited retries when no retry policy is configured
            maxRetries = null != rmrp ? rmrp.getMaxRetries() : -1;

            AddressingProperties maps = RMContextUtils.retrieveMAPs(message, false, true);
            AttributedURIType to = null;
            if (null != maps) {
                to = maps.getTo();
                maps.exposeAs(cfg.getAddressingNamespace());
            }
            if (to != null
                && RMUtils.getAddressingConstants().getAnonymousURI().equals(to.getValue())) {
                // anonymous (back-channel) targets cannot be reconnected to later
                LOG.log(Level.INFO, "Cannot resend to anonymous target. Not scheduling a resend.");
                return;
            }
            RMProperties rmprops = RMContextUtils.retrieveRMProperties(message, true);
            if (null != rmprops) {
                number = rmprops.getSequence().getMessageNumber();
            }
            if (null != manager.getTimer() && maxRetries != 0) {
                schedule();
            }
        }

        /**
         * Initiate resend asynchronsly.
         *
         * @param requestAcknowledge true if a AckRequest header is to be sent
         *            with resend
         */
        protected void initiate(boolean requestAcknowledge) {
            includeAckRequested = requestAcknowledge;
            pending = true;
            Endpoint ep = message.getExchange().getEndpoint();
            Executor executor = ep.getExecutor();
            if (null == executor) {
                executor = ep.getService().getExecutor();
                if (executor == null) {
                    executor = SynchronousExecutor.getInstance();
                } else {
                    LOG.log(Level.FINE, "Using service executor {0}", executor.getClass().getName());
                }
            } else {
                LOG.log(Level.FINE, "Using endpoint executor {0}", executor.getClass().getName());
            }

            try {
                executor.execute(this);
            } catch (RejectedExecutionException ex) {
                LOG.log(Level.SEVERE, "RESEND_INITIATION_FAILED_MSG", ex);
            }
        }

        public void run() {
            try {
                // ensure ACK wasn't received while this task was enqueued
                // on executor
                if (isPending()) {
                    resender.resend(message, includeAckRequested);
                    includeAckRequested = false;
                }
            } finally {
                attempted();
            }
        }

        public long getNumber() {
            return number;
        }

        /**
         * @return number of resend attempts
         */
        public int getRetries() {
            return retries;
        }

        /**
         * @return number of max resend attempts
         */
        public int getMaxRetries() {
            return maxRetries;
        }

        /**
         * @return date of next resend
         */
        public Date getNext() {
            return next;
        }

        /**
         * @return date of previous resend or null if no attempt is yet taken
         */
        public Date getPrevious() {
            if (retries > 0) {
                // reverse the last backoff step to recover the previous attempt time
                return new Date(next.getTime() - nextInterval / backoff);
            }
            return null;
        }

        public long getNextInterval() {
            return nextInterval;
        }

        public long getBackoff() {
            return backoff;
        }

        public boolean isSuspended() {
            return suspended;
        }

        /**
         * @return if resend attempt is pending
         */
        public synchronized boolean isPending() {
            return pending;
        }

        /**
         * ACK has been received for this candidate.
         */
        protected synchronized void resolved() {
            pending = false;
            next = null;
            if (null != nextTask) {
                nextTask.cancel();
                releaseSavedMessage();
            }
        }

        /**
         * Cancel further resend (although no ACK has been received).
         */
        protected synchronized void cancel() {
            if (null != nextTask) {
                nextTask.cancel();
                releaseSavedMessage();
            }
        }

        protected synchronized void suspend() {
            suspended = true;
            pending = false;
            //TODO release the message and later reload it upon resume
            //cancel();
            if (null != nextTask) {
                nextTask.cancel();
            }
        }

        protected synchronized void resume() {
            suspended = false;
            next = new Date(System.currentTimeMillis());
            attempted();
        }

        // Releases the cached message content and attachment resources held for resends.
        private void releaseSavedMessage() {
            CachedOutputStream cos = (CachedOutputStream)message.get(RMMessageConstants.SAVED_CONTENT);
            if (cos != null) {
                cos.releaseTempFileHold();
                try {
                    cos.close();
                } catch (IOException e) {
                    // ignore
                }
            }
            // REVISIT -- When reference holder is not needed anymore, code can be removed.
            Closeable closeable = (Closeable)message.get(RMMessageConstants.ATTACHMENTS_CLOSEABLE);
            if (closeable != null) {
                try {
                    closeable.close();
                } catch (IOException e) {
                    // ignore
                }
            }
        }

        /**
         * @return associated message context
         */
        protected Message getMessage() {
            return message;
        }

        /**
         * A resend has been attempted. Schedule the next attempt.
         */
        protected synchronized void attempted() {
            pending = false;
            retries++;
            if (null != next && maxRetries != retries) {
                next = new Date(next.getTime() + nextInterval);
                nextInterval *= backoff;
                schedule();
            }
        }

        protected final synchronized void schedule() {
            if (null == manager.getTimer()) {
                return;
            }
            class ResendTask extends TimerTask {
                ResendCandidate candidate;

                ResendTask(ResendCandidate c) {
                    candidate = c;
                }

                @Override
                public void run() {
                    if (!candidate.isPending()) {
                        candidate.initiate(includeAckRequested);
                    }
                }
            }
            nextTask = new ResendTask(this);
            try {
                manager.getTimer().schedule(nextTask, next);
            } catch (IllegalStateException ex) {
                // timer may already have been cancelled during shutdown
                LOG.log(Level.WARNING, "SCHEDULE_RESEND_FAILED_MSG", ex);
            }
        }
    }

    /**
     * Encapsulates actual resend logic (pluggable to facilitate unit testing)
     */
    public interface Resender {
        /**
         * Resend mechanics.
         *
         * @param message
         * @param requestAcknowledge if a AckRequest should be included
         */
        void resend(Message message, boolean requestAcknowledge);
    }

    /**
     * Create default Resender logic.
     *
     * @return default Resender
     */
    protected final Resender getDefaultResender() {
        return new Resender() {
            public void resend(Message message, boolean requestAcknowledge) {
                RMProperties properties = RMContextUtils.retrieveRMProperties(message, true);
                SequenceType st = properties.getSequence();
                if (st != null) {
                    LOG.log(Level.INFO, "RESEND_MSG", st.getMessageNumber());
                }
                if (message instanceof SoapMessage) {
                    doResend((SoapMessage)message);
                } else {
                    doResend(new SoapMessage(message));
                }
            }
        };
    }

    /**
     * Plug in replacement resend logic (facilitates unit testing).
     *
     * @param replacement resend logic
     */
    protected void replaceResender(Resender replacement) {
        resender = replacement;
    }

    @SuppressWarnings("unchecked")
    protected JaxbAssertion<RMAssertion> getAssertion(AssertionInfo ai) {
        return (JaxbAssertion<RMAssertion>)ai.getAssertion();
    }

    /**
     * Reads the SOAP header portion of the saved message and re-populates the
     * message's header list from the parsed DOM.
     *
     * @param xmlReader positioned at the start of the saved SOAP document
     * @param message the message whose headers are rebuilt
     * @throws XMLStreamException on parse failure
     */
    private void readHeaders(XMLStreamReader xmlReader, SoapMessage message) throws XMLStreamException {
        // read header portion of SOAP document into DOM
        SoapVersion version = message.getVersion();
        XMLStreamReader filteredReader = new PartialXMLStreamReader(xmlReader, version.getBody());
        Node nd = message.getContent(Node.class);
        W3CDOMStreamWriter writer = message.get(W3CDOMStreamWriter.class);
        final Document doc;
        if (writer != null) {
            StaxUtils.copy(filteredReader, writer);
            doc = writer.getDocument();
        } else if (nd instanceof Document) {
            doc = (Document)nd;
            StaxUtils.readDocElements(doc, doc, filteredReader, false, false);
        } else {
            doc = StaxUtils.read(filteredReader);
            message.setContent(Node.class, doc);
        }

        // get the actual SOAP header
        Element element = doc.getDocumentElement();
        QName header = version.getHeader();
        List<Element> elemList =
            DOMUtils.findAllElementsByTagNameNS(element, header.getNamespaceURI(), header.getLocalPart());
        for (Element elem : elemList) {
            // set all child elements as headers for message transmission
            Element hel = DOMUtils.getFirstElement(elem);
            while (hel != null) {
                SoapHeader sheader = new SoapHeader(DOMUtils.getElementQName(hel), hel);
                message.getHeaders().add(sheader);
                hel = DOMUtils.getNextElement(hel);
            }
        }
    }

    /**
     * Performs the actual retransmission: rebuilds (or reuses) the out interceptor
     * chain, restores headers and addressing properties from the saved message
     * content, and replays the saved body through the conduit.
     *
     * @param message the SOAP message to retransmit
     */
    private void doResend(SoapMessage message) {
        InputStream is = null;
        try {
            // initialize copied interceptor chain for message
            PhaseInterceptorChain retransmitChain = manager.getRetransmitChain(message);
            ProtocolVariation protocol = RMContextUtils.getProtocolVariation(message);
            Endpoint endpoint = manager.getReliableEndpoint(message).getEndpoint(protocol);
            PhaseChainCache cache = new PhaseChainCache();
            boolean after = true;
            if (retransmitChain == null) {
                // no saved retransmit chain, so construct one from scratch (won't work for WS-Security on server, so
                //  need to fix)
                retransmitChain = buildRetransmitChain(endpoint, cache);
                after = false;
            }
            message.setInterceptorChain(retransmitChain);

            // clear flag for SOAP out interceptor so envelope will be written
            message.remove(SoapOutInterceptor.WROTE_ENVELOPE_START);

            // discard all saved content
            Set<Class<?>> formats = message.getContentFormats();
            List<CachedOutputStreamCallback> callbacks = null;
            for (Class<?> clas: formats) {
                Object content = message.getContent(clas);
                if (content != null) {
                    LOG.info("Removing " + clas.getName() + " content of actual type "
                        + content.getClass().getName());
                    message.removeContent(clas);
                    if (clas == OutputStream.class && content instanceof WriteOnCloseOutputStream) {
                        callbacks = ((WriteOnCloseOutputStream)content).getCallbacks();
                    }
                }
            }

            // read SOAP headers from saved input stream
            CachedOutputStream cos = (CachedOutputStream)message.get(RMMessageConstants.SAVED_CONTENT);
            cos.holdTempFile(); // CachedOutputStream is hold until delivering was successful
            is = cos.getInputStream(); // instance is needed to close input stream later on
            XMLStreamReader reader = StaxUtils.createXMLStreamReader(is, StandardCharsets.UTF_8.name());
            message.getHeaders().clear();
            if (reader.getEventType() != XMLStreamConstants.START_ELEMENT
                && reader.nextTag() != XMLStreamConstants.START_ELEMENT) {
                throw new IllegalStateException("No document found");
            }
            readHeaders(reader, message);
            int event;
            // advance the reader to the start of the body content
            while ((event = reader.nextTag()) != XMLStreamConstants.START_ELEMENT) {
                if (event == XMLStreamConstants.END_ELEMENT) {
                    throw new IllegalStateException("No body content present");
                }
            }

            // set message addressing properties
            AddressingProperties maps = MAPCodec.getInstance(message.getExchange().getBus()).unmarshalMAPs(message);
            RMContextUtils.storeMAPs(maps, message, true, MessageUtils.isRequestor(message));
            AttributedURIType to = null;
            if (null != maps) {
                to = maps.getTo();
            }
            if (null == to) {
                LOG.log(Level.SEVERE, "NO_ADDRESS_FOR_RESEND_MSG");
                return;
            }
            if (RMUtils.getAddressingConstants().getAnonymousURI().equals(to.getValue())) {
                LOG.log(Level.FINE, "Cannot resend to anonymous target");
                return;
            }

            // initialize conduit for new message
            Conduit c = message.getExchange().getConduit(message);
            if (c == null) {
                c = buildConduit(message, endpoint, to);
            }
            c.prepare(message);

            // replace standard message marshaling with copy from saved stream
            ListIterator<Interceptor<? extends Message>> iterator = retransmitChain.getIterator();
            while (iterator.hasNext()) {
                Interceptor<? extends Message> incept = iterator.next();
                // remove JAX-WS interceptors which handle message modes and such
                if (incept.getClass().getName().startsWith("org.apache.cxf.jaxws.interceptors")) {
                    retransmitChain.remove(incept);
                } else if (incept instanceof PhaseInterceptor
                    && Phase.MARSHAL.equals(((PhaseInterceptor<?>)incept).getPhase())) {
                    // remove any interceptors from the marshal phase
                    retransmitChain.remove(incept);
                }
            }
            retransmitChain.add(new CopyOutInterceptor(reader));

            // restore callbacks on output stream
            if (callbacks != null) {
                OutputStream os = message.getContent(OutputStream.class);
                if (os != null) {
                    WriteOnCloseOutputStream woc;
                    if (os instanceof WriteOnCloseOutputStream) {
                        woc = (WriteOnCloseOutputStream)os;
                    } else {
                        woc = new WriteOnCloseOutputStream(os);
                        message.setContent(OutputStream.class, woc);
                    }
                    for (CachedOutputStreamCallback cb: callbacks) {
                        woc.registerCallback(cb);
                    }
                }
            }

            // send the message
            message.put(RMMessageConstants.RM_RETRANSMISSION, Boolean.TRUE);
            if (after) {
                retransmitChain.doInterceptStartingAfter(message, RMCaptureOutInterceptor.class.getName());
            } else {
                retransmitChain.doIntercept(message);
            }
            if (LOG.isLoggable(Level.INFO)) {
                RMProperties rmps = RMContextUtils.retrieveRMProperties(message, true);
                SequenceType seq = rmps.getSequence();
                LOG.log(Level.INFO, "Retransmitted message " + seq.getMessageNumber() + " in sequence "
                    + seq.getIdentifier().getValue());
            }
        } catch (Exception ex) {
            LOG.log(Level.SEVERE, "RESEND_FAILED_MSG", ex);
        } finally {
            // make sure to always close InputStreams of the CachedOutputStream to avoid leaving temp files undeleted
            if (null != is) {
                try {
                    is.close();
                } catch (IOException e) {
                    // Ignore
                }
            }
        }
    }

    /**
     * @param message
     * @param endpoint
     * @param to
     * @return
     */
    protected Conduit buildConduit(SoapMessage message, final Endpoint endpoint, AttributedURIType to) {
        Conduit c;
        final String address = to.getValue();
        DeferredConduitSelector cs = new DeferredConduitSelector() {
            @Override
            public synchronized Conduit selectConduit(Message message) {
                final Conduit conduit;
                EndpointInfo endpointInfo = endpoint.getEndpointInfo();
                // temporarily retarget the endpoint at the resend address,
                // restoring the original target afterwards
                EndpointReferenceType original = endpointInfo.getTarget();
                try {
                    if (null != address) {
                        endpointInfo.setAddress(address);
                    }
                    conduit = super.selectConduit(message);
                } finally {
                    endpointInfo.setAddress(original);
                }
                conduits.clear();
                return conduit;
            }
        };

        cs.setEndpoint(endpoint);
        c = cs.selectConduit(message);
        // REVISIT
        // use application endpoint message observer instead?
        c.setMessageObserver(new MessageObserver() {
            public void onMessage(Message message) {
                LOG.fine("Ignoring response to resent message.");
            }
        });
        cs.close();
        message.getExchange().setConduit(c);
        return c;
    }

    /**
     * @param endpoint
     * @param cache
     * @return
     */
    protected PhaseInterceptorChain buildRetransmitChain(final Endpoint endpoint, PhaseChainCache cache) {
        PhaseInterceptorChain retransmitChain;
        Bus bus = getManager().getBus();
        List<Interceptor<? extends Message>> i1 = bus.getOutInterceptors();
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("Interceptors contributed by bus: " + i1);
        }
        List<Interceptor<? extends Message>> i2 = endpoint.getOutInterceptors();
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("Interceptors contributed by endpoint: " + i2);
        }
        List<Interceptor<? extends Message>> i3 = endpoint.getBinding().getOutInterceptors();
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("Interceptors contributed by binding: " + i3);
        }
        PhaseManager pm = bus.getExtension(PhaseManager.class);
        retransmitChain = cache.get(pm.getOutPhases(), i1, i2, i3);
        return retransmitChain;
    }

    /**
     * Marshal-phase interceptor that writes the message body by copying the saved
     * XML stream instead of re-marshaling the payload objects.
     */
    public static class CopyOutInterceptor extends AbstractOutDatabindingInterceptor {
        private final XMLStreamReader reader;

        public CopyOutInterceptor(XMLStreamReader rdr) {
            super(Phase.MARSHAL);
            reader = rdr;
        }

        @Override
        public void handleMessage(Message message) throws Fault {
            try {
                XMLStreamWriter writer = message.getContent(XMLStreamWriter.class);
                StaxUtils.copy(reader, writer);
            } catch (XMLStreamException e) {
                throw new Fault("COULD_NOT_READ_XML_STREAM", LOG, e);
            }
        }
    }
}
googleapis/google-cloud-java
35,194
java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/UpdateOccurrenceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1beta1; /** * * * <pre> * Request to update an occurrence. * </pre> * * Protobuf type {@code grafeas.v1beta1.UpdateOccurrenceRequest} */ public final class UpdateOccurrenceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1beta1.UpdateOccurrenceRequest) UpdateOccurrenceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateOccurrenceRequest.newBuilder() to construct. 
private UpdateOccurrenceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateOccurrenceRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateOccurrenceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1beta1.Grafeas .internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1beta1.UpdateOccurrenceRequest.class, io.grafeas.v1beta1.UpdateOccurrenceRequest.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OCCURRENCE_FIELD_NUMBER = 2; private io.grafeas.v1beta1.Occurrence occurrence_; /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code> * * @return Whether the occurrence field is set. */ @java.lang.Override public boolean hasOccurrence() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code> * * @return The occurrence. */ @java.lang.Override public io.grafeas.v1beta1.Occurrence getOccurrence() { return occurrence_ == null ? io.grafeas.v1beta1.Occurrence.getDefaultInstance() : occurrence_; } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code> */ @java.lang.Override public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrenceOrBuilder() { return occurrence_ == null ? io.grafeas.v1beta1.Occurrence.getDefaultInstance() : occurrence_; } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getOccurrence()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getOccurrence()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1beta1.UpdateOccurrenceRequest)) { return super.equals(obj); } io.grafeas.v1beta1.UpdateOccurrenceRequest other = (io.grafeas.v1beta1.UpdateOccurrenceRequest) obj; if 
(!getName().equals(other.getName())) return false; if (hasOccurrence() != other.hasOccurrence()) return false; if (hasOccurrence()) { if (!getOccurrence().equals(other.getOccurrence())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasOccurrence()) { hash = (37 * hash) + OCCURRENCE_FIELD_NUMBER; hash = (53 * hash) + getOccurrence().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1beta1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
// Builder factory methods generated for every protobuf message.
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(io.grafeas.v1beta1.UpdateOccurrenceRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh Builder; any other instance seeds the Builder with itself.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 * <pre>
 * Request to update an occurrence.
 * </pre>
 *
 * Protobuf type {@code grafeas.v1beta1.UpdateOccurrenceRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:grafeas.v1beta1.UpdateOccurrenceRequest)
    io.grafeas.v1beta1.UpdateOccurrenceRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return io.grafeas.v1beta1.Grafeas
        .internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return io.grafeas.v1beta1.Grafeas
        .internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            io.grafeas.v1beta1.UpdateOccurrenceRequest.class,
            io.grafeas.v1beta1.UpdateOccurrenceRequest.Builder.class);
  }

  // Construct using io.grafeas.v1beta1.UpdateOccurrenceRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates the nested-message field builders when reflective field access may be used.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getOccurrenceFieldBuilder();
      getUpdateMaskFieldBuilder();
    }
  }

  @java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  name_ = "";
  occurrence_ = null;
  if (occurrenceBuilder_ != null) {
    occurrenceBuilder_.dispose();
    occurrenceBuilder_ = null;
  }
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return io.grafeas.v1beta1.Grafeas
      .internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor;
}

@java.lang.Override
public io.grafeas.v1beta1.UpdateOccurrenceRequest getDefaultInstanceForType() {
  return io.grafeas.v1beta1.UpdateOccurrenceRequest.getDefaultInstance();
}

@java.lang.Override
public io.grafeas.v1beta1.UpdateOccurrenceRequest build() {
  io.grafeas.v1beta1.UpdateOccurrenceRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public io.grafeas.v1beta1.UpdateOccurrenceRequest buildPartial() {
  io.grafeas.v1beta1.UpdateOccurrenceRequest result =
      new io.grafeas.v1beta1.UpdateOccurrenceRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies the set fields from this builder into result, translating builder has-bits
// (0x1=name, 0x2=occurrence, 0x4=updateMask) into the message's has-bits.
private void buildPartial0(io.grafeas.v1beta1.UpdateOccurrenceRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.name_ = name_;
  }
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.occurrence_ = occurrenceBuilder_ == null ? occurrence_ : occurrenceBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}

@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof io.grafeas.v1beta1.UpdateOccurrenceRequest) {
    return mergeFrom((io.grafeas.v1beta1.UpdateOccurrenceRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-by-field merge from another UpdateOccurrenceRequest; fields set in other win.
public Builder mergeFrom(io.grafeas.v1beta1.UpdateOccurrenceRequest other) {
  if (other == io.grafeas.v1beta1.UpdateOccurrenceRequest.getDefaultInstance()) return this;
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.hasOccurrence()) {
    mergeOccurrence(other.getOccurrence());
  }
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}

// Wire-format parse loop: dispatches on the field tag (field number << 3 | wire type).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            name_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            input.readMessage(getOccurrenceFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}

private int bitField0_;

// Stored as String once decoded, ByteString before first access.
private java.lang.Object name_ = "";
/**
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @return The name.
 */
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s; // cache the decoded String
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b; // cache the encoded bytes
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @param value The name to set.
 * @return This builder for chaining.
 */
public Builder setName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  name_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearName() {
  name_ = getDefaultInstance().getName();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * <pre>
 * The name of the occurrence in the form of
 * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`.
 * </pre>
 *
 * <code>string name = 1;</code>
 *
 * @param value The bytes for name to set.
 * @return This builder for chaining.
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  name_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

private io.grafeas.v1beta1.Occurrence occurrence_;
// Lazily created helper that keeps a nested Occurrence builder in sync with this builder.
private com.google.protobuf.SingleFieldBuilderV3<
        io.grafeas.v1beta1.Occurrence,
        io.grafeas.v1beta1.Occurrence.Builder,
        io.grafeas.v1beta1.OccurrenceOrBuilder>
    occurrenceBuilder_;
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 *
 * @return Whether the occurrence field is set.
 */
public boolean hasOccurrence() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 *
 * @return The occurrence.
 */
public io.grafeas.v1beta1.Occurrence getOccurrence() {
  if (occurrenceBuilder_ == null) {
    return occurrence_ == null
        ? io.grafeas.v1beta1.Occurrence.getDefaultInstance()
        : occurrence_;
  } else {
    return occurrenceBuilder_.getMessage();
  }
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public Builder setOccurrence(io.grafeas.v1beta1.Occurrence value) {
  if (occurrenceBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    occurrence_ = value;
  } else {
    occurrenceBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public Builder setOccurrence(io.grafeas.v1beta1.Occurrence.Builder builderForValue) {
  if (occurrenceBuilder_ == null) {
    occurrence_ = builderForValue.build();
  } else {
    occurrenceBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public Builder mergeOccurrence(io.grafeas.v1beta1.Occurrence value) {
  if (occurrenceBuilder_ == null) {
    // Merge into the existing value only if one was actually set and is non-default.
    if (((bitField0_ & 0x00000002) != 0)
        && occurrence_ != null
        && occurrence_ != io.grafeas.v1beta1.Occurrence.getDefaultInstance()) {
      getOccurrenceBuilder().mergeFrom(value);
    } else {
      occurrence_ = value;
    }
  } else {
    occurrenceBuilder_.mergeFrom(value);
  }
  if (occurrence_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public Builder clearOccurrence() {
  bitField0_ = (bitField0_ & ~0x00000002);
  occurrence_ = null;
  if (occurrenceBuilder_ != null) {
    occurrenceBuilder_.dispose();
    occurrenceBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public io.grafeas.v1beta1.Occurrence.Builder getOccurrenceBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getOccurrenceFieldBuilder().getBuilder();
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
public io.grafeas.v1beta1.OccurrenceOrBuilder getOccurrenceOrBuilder() {
  if (occurrenceBuilder_ != null) {
    return occurrenceBuilder_.getMessageOrBuilder();
  } else {
    return occurrence_ == null
        ? io.grafeas.v1beta1.Occurrence.getDefaultInstance()
        : occurrence_;
  }
}
/**
 * <pre>
 * The updated occurrence.
 * </pre>
 *
 * <code>.grafeas.v1beta1.Occurrence occurrence = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        io.grafeas.v1beta1.Occurrence,
        io.grafeas.v1beta1.Occurrence.Builder,
        io.grafeas.v1beta1.OccurrenceOrBuilder>
    getOccurrenceFieldBuilder() {
  if (occurrenceBuilder_ == null) {
    occurrenceBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            io.grafeas.v1beta1.Occurrence,
            io.grafeas.v1beta1.Occurrence.Builder,
            io.grafeas.v1beta1.OccurrenceOrBuilder>(
            getOccurrence(), getParentForChildren(), isClean());
    occurrence_ = null; // ownership transferred to the field builder
  }
  return occurrenceBuilder_;
}

private com.google.protobuf.FieldMask updateMask_;
// Lazily created helper that keeps a nested FieldMask builder in sync with this builder.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    updateMaskBuilder_;
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 *
 * @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 *
 * @return The updateMask.
 */
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMaskBuilder_ == null) {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  } else {
    return updateMaskBuilder_.getMessage();
  }
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Merge into the existing value only if one was actually set and is non-default.
    if (((bitField0_ & 0x00000004) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000004;
    onChanged();
  }
  return this;
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000004);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  bitField0_ |= 0x00000004;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
}
/**
 * <pre>
 * The fields to update.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null; // ownership transferred to the field builder
  }
  return updateMaskBuilder_;
}

@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:grafeas.v1beta1.UpdateOccurrenceRequest)
}

// @@protoc_insertion_point(class_scope:grafeas.v1beta1.UpdateOccurrenceRequest)
private static final io.grafeas.v1beta1.UpdateOccurrenceRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new io.grafeas.v1beta1.UpdateOccurrenceRequest();
}

public static io.grafeas.v1beta1.UpdateOccurrenceRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Singleton parser used by every static parseFrom overload above.
private static final com.google.protobuf.Parser<UpdateOccurrenceRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateOccurrenceRequest>() {
      @java.lang.Override
      public UpdateOccurrenceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateOccurrenceRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateOccurrenceRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public io.grafeas.v1beta1.UpdateOccurrenceRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
googlearchive/science-journal
35,450
OpenScienceJournal/whistlepunk_library/src/main/java/com/google/android/apps/forscience/whistlepunk/cloudsync/DriveSyncManager.java
package com.google.android.apps.forscience.whistlepunk.cloudsync; import android.content.Context; import android.util.Log; import com.google.android.apps.forscience.whistlepunk.AppSingleton; import com.google.android.apps.forscience.whistlepunk.DataController; import com.google.android.apps.forscience.whistlepunk.PictureUtils; import com.google.android.apps.forscience.whistlepunk.RecordingDataController; import com.google.android.apps.forscience.whistlepunk.RxDataController; import com.google.android.apps.forscience.whistlepunk.WhistlePunkApplication; import com.google.android.apps.forscience.whistlepunk.accounts.AppAccount; import com.google.android.apps.forscience.whistlepunk.analytics.TrackerConstants; import com.google.android.apps.forscience.whistlepunk.data.GoosciExperimentLibrary; import com.google.android.apps.forscience.whistlepunk.filemetadata.Experiment; import com.google.android.apps.forscience.whistlepunk.filemetadata.ExperimentLibraryManager; import com.google.android.apps.forscience.whistlepunk.filemetadata.ExperimentOverviewPojo; import com.google.android.apps.forscience.whistlepunk.filemetadata.FileMetadataUtil; import com.google.android.apps.forscience.whistlepunk.filemetadata.FileSyncCollection; import com.google.android.apps.forscience.whistlepunk.filemetadata.Label; import com.google.android.apps.forscience.whistlepunk.filemetadata.LocalSyncManager; import com.google.android.apps.forscience.whistlepunk.filemetadata.Trial; import com.google.android.apps.forscience.whistlepunk.metadata.GoosciExperiment; import com.google.android.apps.forscience.whistlepunk.metadata.GoosciLabel; import com.google.android.apps.forscience.whistlepunk.metadata.GoosciScalarSensorData; import com.google.android.apps.forscience.whistlepunk.sensorapi.ScalarSensorDumpReader; import com.google.api.client.http.HttpTransport; import com.google.api.client.json.JsonFactory; import com.google.common.base.Strings; import com.google.common.base.Supplier; import 
java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;

/**
 * Manages a connection to a user's Google Drive, and provides a home for the code that interacts
 * with the Google Drive API. There is one instance of this class for each signed in user, and
 * provides a handle to their drive.
 */
public class DriveSyncManager implements CloudSyncManager {
  private static final String TAG = "DriveSyncManager";

  // Well-known file names used inside the account's Drive storage.
  public static final String EXPERIMENT_LIBRARY_PROTO = "experiment_library.proto";
  public static final String EXPERIMENT_PROTO_FILE = "experiment.proto";

  private final DriveApi driveApi;
  private final AppAccount appAccount;
  private final DataController dc;
  private final RecordingDataController rdc;
  // NOTE(review): written but its read sites are not all visible in this chunk —
  // presumably caches whether the Science Journal Drive folder is known to exist.
  private boolean sjFolderExists = false;

  // State-holders for kicking off a second sync if one is requested during an active sync.
  private boolean syncAgain = false;
  private String lastLogMessage = "";

  // Convenience constructor: initializes the DriveApi from the supplier before delegating.
  DriveSyncManager(
      AppAccount appAccount,
      DataController dc,
      HttpTransport transport,
      JsonFactory jsonFactory,
      Context applicationContext,
      Supplier<DriveApi> driveSupplier,
      RecordingDataController rdc) {
    this(
        appAccount,
        dc,
        driveSupplier.get().init(transport, jsonFactory, appAccount, applicationContext),
        rdc);
  }

  public DriveSyncManager(
      AppAccount appAccount, DataController dc, DriveApi driveWrapper, RecordingDataController rdc) {
    this.appAccount = appAccount;
    this.driveApi = driveWrapper;
    this.dc = dc;
    this.rdc = rdc;
  }

  /** Requests a library sync; coalesces requests that arrive while a sync is already running. */
  @Override
  public void syncExperimentLibrary(Context context, String logMessage) throws IOException {
    // If we are currently syncing, we should wait on this new sync request, and sync again when the
    // first sync completes.
if (isCurrentlySyncing(context)) { syncAgain = true; lastLogMessage = logMessage; return; } if (Log.isLoggable(TAG, Log.INFO)) { Log.i(TAG, logMessage); } if (DriveSyncAndroidService.syncExperimentLibraryFile(context, appAccount)) { AppSingleton.getInstance(context).setSyncServiceBusy(true); } } @Override public void logCloudInfo(String tag) { Executor thread = Executors.newSingleThreadExecutor(); thread.execute( new Runnable() { @Override public void run() { if (Log.isLoggable(tag, Log.WARN)) { boolean remoteExists = false; try { String remoteLibraryId = driveApi.getRemoteExperimentLibraryFileId(); if (remoteLibraryId != null && driveApi.getFileExists(remoteLibraryId)) { remoteExists = true; } } catch (IOException ioe) { Log.w(tag, "IO Exception checking for remote library"); } boolean sjFolderExists = false; try { sjFolderExists = driveApi.sjFolderExists(); } catch (IOException ioe) { Log.w(tag, "IO Exception checking for SJ Folder"); } int experimentCount = 0; try { experimentCount = driveApi.countSJExperiments(); } catch (IOException ioe) { Log.w(tag, "IO Exception counting experiments"); } Log.w(tag, "Remote Library Exists: " + remoteExists); Log.w(tag, "SJ Folder Exists: " + sjFolderExists); Log.w(tag, "Experiment Count: " + experimentCount); } } }); } private Boolean isCurrentlySyncing(Context context) { // With a default value, "blockingMostRecent" doesn't actually block. It returns the default. return AppSingleton.getInstance(context) .whenSyncBusyChanges() .blockingMostRecent(false) .iterator() .next(); } /** * Called by DriveSyncAndroidService to handle Drive syncing of library in the background thread. * This method will likely return while background processes are still occurring. 
*/
void syncExperimentLibraryInBackgroundThread(
    Context context,
    ExperimentLibraryManager experimentLibraryManager,
    LocalSyncManager localSyncManager)
    throws IOException {
  // Collects per-experiment file sync work produced while reconciling; drained at the end.
  Map<String, FileSyncCollection> fileSyncCollectionMap = new HashMap<>();
  if (!appAccount.isSignedIn()) {
    AppSingleton.getInstance(context).setSyncServiceBusy(false);
    return;
  }
  long remoteLibraryVersion = getRemoteLibraryVersion();
  long localLibraryVersion = localSyncManager.getLastSyncedLibraryVersion();
  // Versions match: nothing changed remotely since the last sync; only check deletions.
  if (remoteLibraryVersion == localLibraryVersion) {
    checkForDeletions(experimentLibraryManager);
    AppSingleton.getInstance(context).setSyncServiceBusy(false);
    syncAgain = false;
    return;
  }
  // Is the same user using multiple devices?
  if (localLibraryVersion == 0) {
    WhistlePunkApplication.getUsageTracker(context)
        .trackEvent(
            TrackerConstants.CATEGORY_SIGN_IN,
            TrackerConstants.ACTION_SYNC_EXISTING_ACCOUNT,
            null,
            0);
  }
  // Try to get the remote experiment library
  GoosciExperimentLibrary.ExperimentLibrary remoteLibrary = downloadExperimentLibraryFile();
  Map<String, Long> driveExperimentVersions = driveApi.getAllDriveExperimentVersions();
  if (remoteLibrary != null) {
    // If the remote experiment library already exists, merge it into our local library
    experimentLibraryManager.merge(remoteLibrary, localSyncManager);
  }
  String sjDirectoryId = getSJDirectoryId(experimentLibraryManager);
  experimentLibraryManager.setFolderId(sjDirectoryId);
  List<ExperimentOverviewPojo> overviews = dc.blockingGetExperimentOverviews(true);
  for (String id : experimentLibraryManager.getKnownExperiments()) {
    // For each experiment that we know about (past or current)
    ExperimentOverviewPojo matching = null;
    for (ExperimentOverviewPojo overview : overviews) {
      // For each experiment that is currently not-deleted
      if (id.equals(overview.getExperimentId())) {
        // Find if the known experiment currently exists locally
        matching = overview;
      }
    }
    if (experimentLibraryManager.isDeleted(id)) {
      // If the known experiment has been marked deleted
      if (matching != null) {
        // If it exists locally, delete it.
        if (Log.isLoggable(TAG, Log.INFO)) {
          Log.i(TAG, "Deleting locally: Marked deleted in library");
        }
        deleteExperiment(id);
        deleteExperimentRemotely(context, experimentLibraryManager, id, sjDirectoryId);
      }
      if (localSyncManager.getDirty(id)) {
        if (Log.isLoggable(TAG, Log.INFO)) {
          Log.i(TAG, "Deleting locally: Marked deleted and dirty in library");
        }
        deleteExperimentRemotely(context, experimentLibraryManager, id, sjDirectoryId);
        localSyncManager.setDirty(id, false);
      }
    } else {
      String remoteFileId = experimentLibraryManager.getFileId(id);
      long remoteDriveExperimentVersion = -1;
      if (remoteFileId != null && driveExperimentVersions.get(remoteFileId) != null) {
        remoteDriveExperimentVersion = driveExperimentVersions.get(remoteFileId);
      }
      // Else the experiment hasn't been deleted
      if (matching == null) {
        // And it doesn't exist locally, so add it, if it exists remotely!
        if (Strings.isNullOrEmpty(remoteFileId)) {
          // This happens if the ExperimentLibrary file doesn't have a file ID yet. For example
          // see bug 123845261, where an iOS device and an Android device have both been offline
          // and both come online and attempt to sync at the same time. One device could update
          // the experiment library file and before it finished uploading all the individual
          // experiments, the other device downloads the experiment library file.
          // TODO(b/135479937): Reconcile experiments in drive.
          // For now, track this to find out how often this happens in the wild.
          WhistlePunkApplication.getUsageTracker(context)
              .trackEvent(
                  TrackerConstants.CATEGORY_SYNC,
                  TrackerConstants.ACTION_MISSING_REMOTE_FILE_ID,
                  null,
                  0);
          continue;
        } else if (!driveApi.getFileExists(remoteFileId)) {
          if (Log.isLoggable(TAG, Log.INFO)) {
            Log.i(TAG, "Marking deleted: package not found and local not found");
          }
          experimentLibraryManager.setDeleted(id, true);
        } else {
          Experiment newExperiment =
              Experiment.newExperiment(
                  context,
                  appAccount,
                  experimentLibraryManager,
                  experimentLibraryManager.getModified(id),
                  id,
                  0,
                  experimentLibraryManager.getModified(id));
          fileSyncCollectionMap.put(
              id,
              syncNewRemoteExperimentProtoFileInBackgroundThread(
                  context, id, experimentLibraryManager, localSyncManager, dc, newExperiment));
        }
      } else {
        // It does exist locally. Let's sync it!
        try {
          fileSyncCollectionMap.put(
              id,
              syncExperimentProtoFileInBackgroundThread(
                  context,
                  id,
                  remoteDriveExperimentVersion,
                  experimentLibraryManager,
                  localSyncManager));
        } catch (IOException ioe) {
          Log.e(TAG, "IOException", ioe);
        }
      }
    }
    AppSingleton.getInstance(context).notifyNewExperimentSynced();
  }
  transferFileSyncCollections(context, experimentLibraryManager, fileSyncCollectionMap);
  AppSingleton.getInstance(context).notifyNewExperimentSynced();
  // Now upload the library back to Drive
  remoteLibraryVersion = uploadExperimentLibraryToDrive();
  localSyncManager.setLastSyncedLibraryVersion(remoteLibraryVersion);
  AppSingleton.getInstance(context).setSyncServiceBusy(false);
  // NOTE(review): resets the cached folder flag after every full sync — presumably so the
  // folder is re-verified next time; confirm against the flag's readers.
  sjFolderExists = false;
  // If a sync was requested while we were busy, start another sync.
  if (syncAgain) {
    syncAgain = false;
    syncExperimentLibrary(context, lastLogMessage);
  }
  cleanUpDrive(context, experimentLibraryManager, localSyncManager, sjDirectoryId);
}

/** Uploads the local experiment library proto, inserting or updating as appropriate. */
private long uploadExperimentLibraryToDrive() throws IOException {
  synchronized (appAccount.getLockForExperimentLibraryFile()) {
    java.io.File libraryFile = getLocalLibraryFile();
    String fileId = driveApi.getRemoteExperimentLibraryFileId();
    if (fileId == null) {
      // Either by inserting, if it didn't already exist on Drive
      driveApi.insertExperimentLibraryFile(libraryFile);
      fileId = driveApi.getRemoteExperimentLibraryFileId();
    } else {
      // Or by updating, if it did. Drive doesn't have an upsert action, which makes this clunky.
      driveApi.updateExperimentLibraryFile(libraryFile, fileId);
    }
    return driveApi.getFileVersion(fileId);
  }
}

/**
 * Downloads one asset from the experiment's Drive package into the local experiment directory.
 * Returns null when not signed in or fileName is empty.
 */
private java.io.File downloadFileInBackgroundThread(
    String experimentId, String fileName, ExperimentLibraryManager elm) throws IOException {
  if (!appAccount.isSignedIn()) {
    return null;
  }
  if (Strings.isNullOrEmpty(fileName)) {
    return null;
  }
  String packageId = elm.getFileId(experimentId);
  java.io.File localExperimentDirectory =
      FileMetadataUtil.getInstance().getExperimentDirectory(appAccount, experimentId);
  return driveApi.downloadExperimentAsset(packageId, localExperimentDirectory, fileName);
}

/** Uploads one file from the local experiment directory into the experiment's Drive package. */
private void uploadFileInBackgroundThread(
    String experimentId, String fileName, ExperimentLibraryManager elm) throws IOException {
  if (!appAccount.isSignedIn()) {
    return;
  }
  String packageId = elm.getFileId(experimentId);
  java.io.File localExperimentDirectory =
      FileMetadataUtil.getInstance().getExperimentDirectory(appAccount, experimentId);
  java.io.File localFile = new java.io.File(localExperimentDirectory, fileName);
  driveApi.uploadFile(localFile, packageId);
}

/** Downloads a trial's sensor-data proto and replays it into the recording data controller. */
private void downloadTrialInBackgroundThread(
    String experimentId, String trialId, ExperimentLibraryManager elm) throws IOException {
  if (!appAccount.isSignedIn()) {
    return;
  }
  java.io.File localFile =
downloadFileInBackgroundThread( experimentId, FileMetadataUtil.getInstance().getTrialProtoFileName(trialId), elm); ScalarSensorDumpReader dumpReader = new ScalarSensorDumpReader(rdc); HashMap<String, String> trialIdMap = new HashMap<>(); trialIdMap.put(trialId, trialId); GoosciScalarSensorData.ScalarSensorData dataProto = null; if (localFile.canRead()) { try (FileInputStream fis = new FileInputStream(localFile)) { dataProto = GoosciScalarSensorData.ScalarSensorData.parseFrom(fis); } catch (Exception e) { Log.e(TAG, "Exception reading trial data file", e); } } if (dataProto == null) { dataProto = GoosciScalarSensorData.ScalarSensorData.getDefaultInstance(); } dumpReader.readData(dataProto, trialIdMap); } private void uploadTrialInBackgroundThread( Context context, String experimentId, String trialId, ExperimentLibraryManager elm) throws IOException { if (!appAccount.isSignedIn()) { return; } java.io.File localFile = RxDataController.writeTrialProtoToFile(dc, experimentId, trialId).blockingGet(); try { if (localFile == null) { throw new IOException("Trial not found"); } else { // Get the remote Drive "Science Journal" folder. 
String sjDirectoryId = getSJDirectoryId(elm); // Get the Drive embedded package ID and metadata for the Experiment String packageId = getExperimentPackageId(context, elm, experimentId, sjDirectoryId); driveApi.uploadFile(localFile, packageId); } } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "File write failed", ioe); } } } private GoosciExperimentLibrary.ExperimentLibrary downloadExperimentLibraryFile() throws IOException { String fileId = driveApi.getRemoteExperimentLibraryFileId(); if (fileId == null) { return null; } return driveApi.downloadExperimentLibraryFile(fileId); } private long getRemoteLibraryVersion() throws IOException { String fileId = driveApi.getRemoteExperimentLibraryFileId(); if (fileId == null) { return -1; } return driveApi.getFileVersion(fileId); } private java.io.File getLocalLibraryFile() { return new java.io.File(appAccount.getFilesDir(), EXPERIMENT_LIBRARY_PROTO); } /** * Called by DriveSyncAndroidService to handle Drive syncing of experiments in the background * thread. */ FileSyncCollection syncNewRemoteExperimentProtoFileInBackgroundThread( Context context, String experimentId, ExperimentLibraryManager elm, LocalSyncManager lsm, DataController dc, Experiment newExperiment) throws IOException { if (!appAccount.isSignedIn()) { return new FileSyncCollection(); } getExperimentProtoFileFromRemoteInBackgroundThread( context, experimentId, elm, lsm, dc, newExperiment); WhistlePunkApplication.getUsageTracker(context) .trackEvent( TrackerConstants.CATEGORY_SYNC, TrackerConstants.ACTION_SYNC_EXPERIMENT_FROM_DRIVE, null, 0); return new FileSyncCollection(); } /** * Called by DriveSyncAndroidService to handle Drive syncing of experiments in the background * thread. 
*/ FileSyncCollection syncExperimentProtoFileInBackgroundThread( Context context, String experimentId, long remoteFileVersion, ExperimentLibraryManager elm, LocalSyncManager localSyncManager) throws IOException { if (!appAccount.isSignedIn()) { return new FileSyncCollection(); } // Get the remote Drive "Science Journal" folder. String sjDirectoryId = getSJDirectoryId(elm); // Get the Drive embedded package ID and metadata for the Experiment String packageId = getExperimentPackageId(context, elm, experimentId, sjDirectoryId); if (getPackageFormatVersion(packageId) > 1) { // This is a new experiment version. We can't work with it. // Versioning design: // https://docs.google.com/document/d/1d9sImPmSW4CJHzEiabxl0bxkrJgU5jTtAKq7rjoUEkA/edit return new FileSyncCollection(); } // Handles case where user has manually deleted the experiment package. if (!driveApi.getFileExists(packageId)) { if (Log.isLoggable(TAG, Log.INFO)) { Log.i(TAG, "Deleting locally: package not found"); } deleteExperiment(experimentId); return new FileSyncCollection(); } DriveFile serverExperimentProtoMetadata = null; if (remoteFileVersion < 0) { serverExperimentProtoMetadata = driveApi.getExperimentProtoMetadata(packageId); if (serverExperimentProtoMetadata != null) { remoteFileVersion = serverExperimentProtoMetadata.getVersion(); } } // Handles cases where proto doesn't exist in remote package. 
if (remoteFileVersion < 0 && serverExperimentProtoMetadata == null) { if (!RxDataController.experimentExists(dc, experimentId).blockingGet()) { return new FileSyncCollection(); } Experiment localExperiment = RxDataController.getExperimentById(dc, experimentId).blockingGet(); if (localExperiment.isEmpty()) { return new FileSyncCollection(); } if (localExperiment.getLastUsedTime() > elm.getModified(experimentId)) { elm.setModified(experimentId, localExperiment.getLastUsedTime()); } if (localExperiment.isArchived() != elm.isArchived(experimentId)) { localExperiment.setArchived(context, appAccount, elm.isArchived(experimentId)); } RxDataController.updateExperimentEvenIfNotActive( dc, localExperiment, elm.getModified(experimentId), false) .blockingAwait(); localSyncManager.setServerArchived(experimentId, elm.isArchived(experimentId)); insertExperimentProto(experimentId, packageId, localSyncManager, localExperiment.getTitle()); for (Trial t : localExperiment.getTrials()) { uploadTrialInBackgroundThread(context, experimentId, t.getTrialId(), elm); for (Label l : t.getLabels()) { uploadLabelIfNecessary(l, experimentId, elm); } } for (Label l : localExperiment.getLabels()) { uploadLabelIfNecessary(l, experimentId, elm); } } else { // Get the remote and local versions, as well as if there have been local edits. 
long localExperimentVersion = localSyncManager.getLastSyncedVersion(experimentId); boolean isDirty = localSyncManager.getDirty(experimentId); if (!RxDataController.experimentExists(dc, experimentId).blockingGet()) { return new FileSyncCollection(); } Experiment localExperiment = RxDataController.getExperimentById(dc, experimentId).blockingGet(); localExperiment.cleanTrials(context, appAccount); if (localExperiment.getLastUsedTime() > elm.getModified(experimentId)) { elm.setModified(experimentId, localExperiment.getLastUsedTime()); } else { localExperiment.setLastUsedTime(elm.getModified(experimentId)); } // if there have been remote changes OR local changes, we have to merge. if (remoteFileVersion != localExperimentVersion || isDirty || elm.isArchived(experimentId) != localSyncManager.getServerArchived(experimentId)) { // Download remote file and merge with local. if (serverExperimentProtoMetadata == null) { serverExperimentProtoMetadata = driveApi.getExperimentProtoMetadata(packageId); } GoosciExperiment.Experiment remoteExperiment = downloadExperimentProto(serverExperimentProtoMetadata.getId()); FileSyncCollection sync = RxDataController.mergeExperiment( dc, experimentId, Experiment.fromExperiment(remoteExperiment, new ExperimentOverviewPojo()), false) .blockingGet(); localSyncManager.setServerArchived(experimentId, elm.isArchived(experimentId)); localExperiment.setArchived(context, appAccount, elm.isArchived(experimentId)); updateExperimentProto( experimentId, serverExperimentProtoMetadata, localSyncManager, packageId, localExperiment.getTitle()); RxDataController.updateExperimentEvenIfNotActive( dc, localExperiment, elm.getModified(experimentId), false) .blockingAwait(); try { String imagePath = localExperiment.getImagePath(); if (Strings.isNullOrEmpty(imagePath)) { return sync; } java.io.File overviewImage = new java.io.File( PictureUtils.getExperimentOverviewFullImagePath( appAccount, localExperiment.getPathRelativeToAccountRoot( 
localExperiment.getImagePath()))); if (overviewImage.exists()) { uploadFileInBackgroundThread( experimentId, FileMetadataUtil.getInstance() .getRelativePathInExperiment(experimentId, overviewImage), elm); } else { downloadFileInBackgroundThread( experimentId, FileMetadataUtil.getInstance() .getRelativePathInExperiment(experimentId, overviewImage), elm); } return sync; } catch (IOException ioe) { Log.e(TAG, "IOException", ioe); } } } return new FileSyncCollection(); } /** * Called by DriveSyncAndroidService to handle Drive syncing of experiments in the background * thread. */ private void getExperimentProtoFileFromRemoteInBackgroundThread( Context context, String experimentId, ExperimentLibraryManager elm, LocalSyncManager localSyncManager, DataController dc, Experiment newExperiment) throws IOException { // Get the remote Drive "Science Journal" folder. String sjDirectoryId = getSJDirectoryId(elm); // Get the Drive embedded package ID and metadata for the Experiment String packageId = getExperimentPackageId(context, elm, experimentId, sjDirectoryId); DriveFile serverExperimentProtoMetadata = driveApi.getExperimentProtoMetadata(packageId); if (serverExperimentProtoMetadata == null) { deleteExperiment(experimentId); return; } GoosciExperiment.Experiment remoteExperiment = downloadExperimentProto(serverExperimentProtoMetadata.getId()); newExperiment.setTitle(remoteExperiment.getTitle()); try { RxDataController.addExperiment(dc, newExperiment).blockingAwait(); } catch (IllegalArgumentException e) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "Experiment already added to data controller", e); } } Experiment localExperiment = RxDataController.getExperimentById(dc, experimentId).blockingGet(); localSyncManager.setServerArchived(experimentId, elm.isArchived(experimentId)); localExperiment.setArchived(context, appAccount, elm.isArchived(experimentId)); ExperimentOverviewPojo overview = new ExperimentOverviewPojo(); overview.setExperimentId(experimentId); 
RxDataController.mergeExperiment( dc, experimentId, Experiment.fromExperiment(remoteExperiment, new ExperimentOverviewPojo()), true) .toCompletable() .blockingAwait(); RxDataController.updateExperimentEvenIfNotActive( dc, Experiment.fromExperiment(remoteExperiment, overview), elm.getModified(experimentId), false) .blockingAwait(); for (Trial t : localExperiment.getTrials()) { downloadTrialInBackgroundThread(experimentId, t.getTrialId(), elm); for (Label l : t.getLabels()) { downloadLabelIfNecessary(l, experimentId, elm); } } for (Label l : localExperiment.getLabels()) { downloadLabelIfNecessary(l, experimentId, elm); } try { String imagePath = localExperiment.getImagePath(); if (Strings.isNullOrEmpty(imagePath)) { return; } java.io.File overviewImage = new java.io.File( PictureUtils.getExperimentOverviewFullImagePath( appAccount, PictureUtils.getExperimentOverviewRelativeImagePath( experimentId, localExperiment.getImagePath()))); if (overviewImage.exists()) { uploadFileInBackgroundThread( experimentId, FileMetadataUtil.getInstance().getRelativePathInExperiment(experimentId, overviewImage), elm); } else { downloadFileInBackgroundThread( experimentId, FileMetadataUtil.getInstance().getRelativePathInExperiment(experimentId, overviewImage), elm); } } catch (IOException ioe) { Log.e(TAG, "IOException", ioe); } } private int getPackageFormatVersion(String packageId) throws IOException { return driveApi.getPackageVersion(packageId); } private void uploadLabelIfNecessary( Label l, String experimentId, ExperimentLibraryManager elm) { if (l.getType() == GoosciLabel.Label.ValueType.PICTURE) { try { uploadFileInBackgroundThread(experimentId, l.getPictureLabelValue().getFilePath(), elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } } private void downloadLabelIfNecessary( Label l, String experimentId, ExperimentLibraryManager elm) { if (l.getType() == GoosciLabel.Label.ValueType.PICTURE) { try { 
downloadFileInBackgroundThread(experimentId, l.getPictureLabelValue().getFilePath(), elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } } // Gets the Science Journal folder file ID. private String getSJDirectoryId(ExperimentLibraryManager experimentLibraryManager) throws IOException { String folderId = experimentLibraryManager.getFolderId(); if (Strings.isNullOrEmpty(folderId)) { // If the folder isn't stored in the experiment library file, create one and save it. String sjFolderId = driveApi.createNewSJFolder(); experimentLibraryManager.setFolderId(sjFolderId); } else { if (!sjFolderExists && !driveApi.getFileExists(folderId)) { String sjFolderId = driveApi.createNewSJFolder(); experimentLibraryManager.setFolderId(sjFolderId); } sjFolderExists = true; } return experimentLibraryManager.getFolderId(); } // Download the actual experiment proto from Drive. private GoosciExperiment.Experiment downloadExperimentProto(String fileId) throws IOException { return driveApi.downloadExperimentProtoFile(fileId); } // Overwrite an existing proto on Drive private void updateExperimentProto( String experimentId, DriveFile serverExperimentProtoMetadata, LocalSyncManager localSyncManager, String packageId, String experimentTitle) throws IOException { long newVersion; synchronized (appAccount.getLockForExperimentProtoFile()) { newVersion = driveApi.updateExperimentProto( FileMetadataUtil.getInstance() .getExperimentFile(appAccount, experimentId, EXPERIMENT_PROTO_FILE), serverExperimentProtoMetadata, packageId, experimentTitle); } localSyncManager.setDirty(experimentId, false); localSyncManager.setLastSyncedVersion(experimentId, newVersion); } // Create the proto for the first time. There's no Upsert functionality, so we have to create the // file and update the file through different REST endpoints. Shrugging emoji. // We'll also add the Version proto. 
private void insertExperimentProto( String experimentId, String packageId, LocalSyncManager localSyncManager, String experimentTitle) throws IOException { long newVersion; synchronized (appAccount.getLockForExperimentProtoFile()) { newVersion = driveApi.insertExperimentProto( FileMetadataUtil.getInstance() .getExperimentFile(appAccount, experimentId, EXPERIMENT_PROTO_FILE), packageId, experimentTitle); } localSyncManager.setDirty(experimentId, false); localSyncManager.setLastSyncedVersion(experimentId, newVersion); } // Delete an experiment locally. private void deleteExperiment(String experimentId) { if (RxDataController.experimentExists(dc, experimentId).blockingGet()) { RxDataController.deleteExperiment(dc, experimentId).blockingAwait(); } } // Delete an experiment remotely. private void deleteExperimentRemotely( Context context, ExperimentLibraryManager elm, String experimentId, String directoryId) throws IOException { if (hasLocalPackageId(elm, experimentId)) { String remoteFileId = getExperimentPackageId(context, elm, experimentId, directoryId); if (remoteFileId != null && driveApi.getFileExists(remoteFileId)) { driveApi.trashFileById(remoteFileId); } } } private boolean hasLocalPackageId(ExperimentLibraryManager elm, String experimentId) { String elmId = elm.getFileId(experimentId); return !Strings.isNullOrEmpty(elmId); } // Get the Package ID for the experiment on Drive private String getExperimentPackageId( Context context, ExperimentLibraryManager elm, String experimentId, String directoryId) throws IOException { // If we know it locally, return it. 
String elmId = elm.getFileId(experimentId); if (!Strings.isNullOrEmpty(elmId)) { return elmId; } String id = driveApi.getExperimentPackageId(context, directoryId); elm.setFileId(experimentId, id); return id; } private void cleanUpDrive( Context context, ExperimentLibraryManager elm, LocalSyncManager lsm, String directoryId) throws IOException { for (String id : elm.getKnownExperiments()) { if (elm.isDeleted(id) && lsm.getDirty(id)) { if (Log.isLoggable(TAG, Log.INFO)) { Log.i(TAG, "Deleting remotely: Marked deleted in library"); } deleteExperimentRemotely(context, elm, id, directoryId); lsm.setDirty(id, false); } } } private void transferFileSyncCollections(Context context, ExperimentLibraryManager elm, Map<String, FileSyncCollection> collectionMap) { for (String experimentId : collectionMap.keySet()) { FileSyncCollection sync = collectionMap.get(experimentId); for (String download : sync.getImageDownloads()) { try { downloadFileInBackgroundThread(experimentId, download, elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } for (String upload : sync.getImageUploads()) { try { uploadFileInBackgroundThread(experimentId, upload, elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } for (String download : sync.getTrialDownloads()) { try { downloadTrialInBackgroundThread(experimentId, download, elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } for (String upload : sync.getTrialUploads()) { try { uploadTrialInBackgroundThread(context, experimentId, upload, elm); } catch (IOException ioe) { if (Log.isLoggable(TAG, Log.ERROR)) { Log.e(TAG, "IOException", ioe); } } } } } private void checkForDeletions(ExperimentLibraryManager elm) throws IOException { List<String> toDelete = new ArrayList<>(); for (String experiment : elm.getKnownExperiments()) { String fileId = elm.getFileId(experiment); if 
(!elm.isDeleted(experiment) && !driveApi.getFileExists(fileId)) { toDelete.add(experiment); } } for (String experiment : toDelete) { deleteExperiment(experiment); } } }
apache/harmony
35,655
classlib/modules/rmi/src/main/java/org/apache/harmony/rmi/activation/Rmid_Stub.java
/* * RMI stub class * for class org.apache.harmony.rmi.activation.Rmid * Compatible with stub protocol version 1.1/1.2 * * Generated by DRL RMI Compiler (rmic). * * DO NOT EDIT!!! * Contents subject to change without notice! */ package org.apache.harmony.rmi.activation; import org.apache.harmony.rmi.internal.nls.Messages; public final class Rmid_Stub extends java.rmi.server.RemoteStub implements java.rmi.activation.ActivationSystem, java.rmi.activation.ActivationMonitor, java.rmi.activation.Activator, java.rmi.Remote { private static final long serialVersionUID = 2; private static final long interfaceHash = 8470858815147946311L; private static boolean useNewInvoke; private static final java.rmi.server.Operation[] operations = { new java.rmi.server.Operation("java.rmi.MarshalledObject activate(java.rmi.activation.ActivationID, boolean)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationMonitor activeGroup(java.rmi.activation.ActivationGroupID, java.rmi.activation.ActivationInstantiator, long)"), //$NON-NLS-1$ new java.rmi.server.Operation("void activeObject(java.rmi.activation.ActivationID, java.rmi.MarshalledObject)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationDesc getActivationDesc(java.rmi.activation.ActivationID)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationGroupDesc getActivationGroupDesc(java.rmi.activation.ActivationGroupID)"), //$NON-NLS-1$ new java.rmi.server.Operation("void inactiveGroup(java.rmi.activation.ActivationGroupID, long)"), //$NON-NLS-1$ new java.rmi.server.Operation("void inactiveObject(java.rmi.activation.ActivationID)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationGroupID registerGroup(java.rmi.activation.ActivationGroupDesc)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationID registerObject(java.rmi.activation.ActivationDesc)"), //$NON-NLS-1$ new 
java.rmi.server.Operation("java.rmi.activation.ActivationDesc setActivationDesc(java.rmi.activation.ActivationID, java.rmi.activation.ActivationDesc)"), //$NON-NLS-1$ new java.rmi.server.Operation("java.rmi.activation.ActivationGroupDesc setActivationGroupDesc(java.rmi.activation.ActivationGroupID, java.rmi.activation.ActivationGroupDesc)"), //$NON-NLS-1$ new java.rmi.server.Operation("void shutdown()"), //$NON-NLS-1$ new java.rmi.server.Operation("void unregisterGroup(java.rmi.activation.ActivationGroupID)"), //$NON-NLS-1$ new java.rmi.server.Operation("void unregisterObject(java.rmi.activation.ActivationID)") //$NON-NLS-1$ }; private static java.lang.reflect.Method $method_activate_0; private static java.lang.reflect.Method $method_activeGroup_1; private static java.lang.reflect.Method $method_activeObject_2; private static java.lang.reflect.Method $method_getActivationDesc_3; private static java.lang.reflect.Method $method_getActivationGroupDesc_4; private static java.lang.reflect.Method $method_inactiveGroup_5; private static java.lang.reflect.Method $method_inactiveObject_6; private static java.lang.reflect.Method $method_registerGroup_7; private static java.lang.reflect.Method $method_registerObject_8; private static java.lang.reflect.Method $method_setActivationDesc_9; private static java.lang.reflect.Method $method_setActivationGroupDesc_10; private static java.lang.reflect.Method $method_shutdown_11; private static java.lang.reflect.Method $method_unregisterGroup_12; private static java.lang.reflect.Method $method_unregisterObject_13; static { try { java.rmi.server.RemoteRef.class.getMethod("invoke", new java.lang.Class[] {java.rmi.Remote.class, java.lang.reflect.Method.class, java.lang.Object[].class, long.class}); //$NON-NLS-1$ $method_activate_0 = java.rmi.activation.Activator.class.getMethod("activate", new java.lang.Class[] {java.rmi.activation.ActivationID.class, boolean.class}); //$NON-NLS-1$ $method_activeGroup_1 = 
java.rmi.activation.ActivationSystem.class.getMethod("activeGroup", new java.lang.Class[] {java.rmi.activation.ActivationGroupID.class, java.rmi.activation.ActivationInstantiator.class, long.class}); //$NON-NLS-1$ $method_activeObject_2 = java.rmi.activation.ActivationMonitor.class.getMethod("activeObject", new java.lang.Class[] {java.rmi.activation.ActivationID.class, java.rmi.MarshalledObject.class}); //$NON-NLS-1$ $method_getActivationDesc_3 = java.rmi.activation.ActivationSystem.class.getMethod("getActivationDesc", new java.lang.Class[] {java.rmi.activation.ActivationID.class}); //$NON-NLS-1$ $method_getActivationGroupDesc_4 = java.rmi.activation.ActivationSystem.class.getMethod("getActivationGroupDesc", new java.lang.Class[] {java.rmi.activation.ActivationGroupID.class}); //$NON-NLS-1$ $method_inactiveGroup_5 = java.rmi.activation.ActivationMonitor.class.getMethod("inactiveGroup", new java.lang.Class[] {java.rmi.activation.ActivationGroupID.class, long.class}); //$NON-NLS-1$ $method_inactiveObject_6 = java.rmi.activation.ActivationMonitor.class.getMethod("inactiveObject", new java.lang.Class[] {java.rmi.activation.ActivationID.class}); //$NON-NLS-1$ $method_registerGroup_7 = java.rmi.activation.ActivationSystem.class.getMethod("registerGroup", new java.lang.Class[] {java.rmi.activation.ActivationGroupDesc.class}); //$NON-NLS-1$ $method_registerObject_8 = java.rmi.activation.ActivationSystem.class.getMethod("registerObject", new java.lang.Class[] {java.rmi.activation.ActivationDesc.class}); //$NON-NLS-1$ $method_setActivationDesc_9 = java.rmi.activation.ActivationSystem.class.getMethod("setActivationDesc", new java.lang.Class[] {java.rmi.activation.ActivationID.class, java.rmi.activation.ActivationDesc.class}); //$NON-NLS-1$ $method_setActivationGroupDesc_10 = java.rmi.activation.ActivationSystem.class.getMethod("setActivationGroupDesc", new java.lang.Class[] {java.rmi.activation.ActivationGroupID.class, java.rmi.activation.ActivationGroupDesc.class}); 
//$NON-NLS-1$ $method_shutdown_11 = java.rmi.activation.ActivationSystem.class.getMethod("shutdown", new java.lang.Class[] {}); //$NON-NLS-1$ $method_unregisterGroup_12 = java.rmi.activation.ActivationSystem.class.getMethod("unregisterGroup", new java.lang.Class[] {java.rmi.activation.ActivationGroupID.class}); //$NON-NLS-1$ $method_unregisterObject_13 = java.rmi.activation.ActivationSystem.class.getMethod("unregisterObject", new java.lang.Class[] {java.rmi.activation.ActivationID.class}); //$NON-NLS-1$ useNewInvoke = true; } catch (java.lang.NoSuchMethodException e) { useNewInvoke = false; } } public Rmid_Stub() { super(); } public Rmid_Stub(java.rmi.server.RemoteRef ref) { super(ref); } // Implementation of activate(ActivationID, boolean) public java.rmi.MarshalledObject activate(java.rmi.activation.ActivationID $param_ActivationID_1, boolean $param_boolean_2) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownObjectException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_activate_0, new java.lang.Object[] {$param_ActivationID_1, new java.lang.Boolean($param_boolean_2)}, -8767355154875805558L); return ((java.rmi.MarshalledObject) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 0, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); out.writeBoolean($param_boolean_2); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.MarshalledObject $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.MarshalledObject) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch 
(java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of activeGroup(ActivationGroupID, ActivationInstantiator, long) public java.rmi.activation.ActivationMonitor activeGroup(java.rmi.activation.ActivationGroupID $param_ActivationGroupID_1, java.rmi.activation.ActivationInstantiator $param_ActivationInstantiator_2, long $param_long_3) throws java.rmi.activation.UnknownGroupException, java.rmi.activation.ActivationException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_activeGroup_1, new java.lang.Object[] {$param_ActivationGroupID_1, $param_ActivationInstantiator_2, new java.lang.Long($param_long_3)}, -4575843150759415294L); return ((java.rmi.activation.ActivationMonitor) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 1, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupID_1); out.writeObject($param_ActivationInstantiator_2); out.writeLong($param_long_3); } catch (java.io.IOException e) { throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationMonitor $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationMonitor) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new 
java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of activeObject(ActivationID, MarshalledObject) public void activeObject(java.rmi.activation.ActivationID $param_ActivationID_1, java.rmi.MarshalledObject $param_MarshalledObject_2) throws java.rmi.activation.UnknownObjectException, java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_activeObject_2, new java.lang.Object[] {$param_ActivationID_1, $param_MarshalledObject_2}, 2543984342209939736L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 2, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); out.writeObject($param_MarshalledObject_2); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); ref.done(call); } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.UnknownObjectException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of getActivationDesc(ActivationID) public java.rmi.activation.ActivationDesc getActivationDesc(java.rmi.activation.ActivationID 
$param_ActivationID_1) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownObjectException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_getActivationDesc_3, new java.lang.Object[] {$param_ActivationID_1}, 4830055440982622087L); return ((java.rmi.activation.ActivationDesc) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 3, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationDesc $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationDesc) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of getActivationGroupDesc(ActivationGroupID) public java.rmi.activation.ActivationGroupDesc getActivationGroupDesc(java.rmi.activation.ActivationGroupID $param_ActivationGroupID_1) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object 
$result = ref.invoke(this, $method_getActivationGroupDesc_4, new java.lang.Object[] {$param_ActivationGroupID_1}, -8701843806548736528L); return ((java.rmi.activation.ActivationGroupDesc) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 4, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupID_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationGroupDesc $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationGroupDesc) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of inactiveGroup(ActivationGroupID, long) public void inactiveGroup(java.rmi.activation.ActivationGroupID $param_ActivationGroupID_1, long $param_long_2) throws java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_inactiveGroup_5, new java.lang.Object[] {$param_ActivationGroupID_1, new java.lang.Long($param_long_2)}, -399287892768650944L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) 
this, operations, 5, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupID_1); out.writeLong($param_long_2); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); ref.done(call); } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.UnknownGroupException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of inactiveObject(ActivationID) public void inactiveObject(java.rmi.activation.ActivationID $param_ActivationID_1) throws java.rmi.activation.UnknownObjectException, java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_inactiveObject_6, new java.lang.Object[] {$param_ActivationID_1}, -4165404120701281807L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 6, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); ref.done(call); } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.UnknownObjectException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of registerGroup(ActivationGroupDesc) public java.rmi.activation.ActivationGroupID registerGroup(java.rmi.activation.ActivationGroupDesc $param_ActivationGroupDesc_1) throws 
java.rmi.activation.ActivationException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_registerGroup_7, new java.lang.Object[] {$param_ActivationGroupDesc_1}, 6921515268192657754L); return ((java.rmi.activation.ActivationGroupID) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 7, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupDesc_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationGroupID $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationGroupID) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of registerObject(ActivationDesc) public java.rmi.activation.ActivationID registerObject(java.rmi.activation.ActivationDesc $param_ActivationDesc_1) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_registerObject_8, new java.lang.Object[] 
{$param_ActivationDesc_1}, -3006759798994351347L); return ((java.rmi.activation.ActivationID) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 8, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationDesc_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationID $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationID) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { //rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of setActivationDesc(ActivationID, ActivationDesc) public java.rmi.activation.ActivationDesc setActivationDesc(java.rmi.activation.ActivationID $param_ActivationID_1, java.rmi.activation.ActivationDesc $param_ActivationDesc_2) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownObjectException, java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_setActivationDesc_9, new java.lang.Object[] {$param_ActivationID_1, $param_ActivationDesc_2}, 7128043237057180796L); return 
((java.rmi.activation.ActivationDesc) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 9, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); out.writeObject($param_ActivationDesc_2); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationDesc $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationDesc) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of setActivationGroupDesc(ActivationGroupID, ActivationGroupDesc) public java.rmi.activation.ActivationGroupDesc setActivationGroupDesc(java.rmi.activation.ActivationGroupID $param_ActivationGroupID_1, java.rmi.activation.ActivationGroupDesc $param_ActivationGroupDesc_2) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { java.lang.Object $result = ref.invoke(this, $method_setActivationGroupDesc_10, new java.lang.Object[] {$param_ActivationGroupID_1, $param_ActivationGroupDesc_2}, 1213918527826541191L); return 
((java.rmi.activation.ActivationGroupDesc) $result); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 10, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupID_1); out.writeObject($param_ActivationGroupDesc_2); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); java.rmi.activation.ActivationGroupDesc $result; try { java.io.ObjectInput in = call.getInputStream(); $result = (java.rmi.activation.ActivationGroupDesc) in.readObject(); } catch (java.io.IOException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } catch (java.lang.ClassNotFoundException e) { // rmi.27=Error unmarshalling return value throw new java.rmi.UnmarshalException(Messages.getString("rmi.27"), e); //$NON-NLS-1$ } finally { ref.done(call); } return $result; } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of shutdown() public void shutdown() throws java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_shutdown_11, null, -7207851917985848402L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 11, interfaceHash); ref.invoke(call); ref.done(call); } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // 
Implementation of unregisterGroup(ActivationGroupID) public void unregisterGroup(java.rmi.activation.ActivationGroupID $param_ActivationGroupID_1) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownGroupException, java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_unregisterGroup_12, new java.lang.Object[] {$param_ActivationGroupID_1}, 3768097077835970701L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 12, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationGroupID_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); ref.done(call); } } catch (java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } // Implementation of unregisterObject(ActivationID) public void unregisterObject(java.rmi.activation.ActivationID $param_ActivationID_1) throws java.rmi.activation.ActivationException, java.rmi.activation.UnknownObjectException, java.rmi.RemoteException { try { if (useNewInvoke) { ref.invoke(this, $method_unregisterObject_13, new java.lang.Object[] {$param_ActivationID_1}, -6843850585331411084L); } else { java.rmi.server.RemoteCall call = ref.newCall((java.rmi.server.RemoteObject) this, operations, 13, interfaceHash); try { java.io.ObjectOutput out = call.getOutputStream(); out.writeObject($param_ActivationID_1); } catch (java.io.IOException e) { // rmi.26=Error marshalling arguments throw new java.rmi.MarshalException(Messages.getString("rmi.26"), e); //$NON-NLS-1$ } ref.invoke(call); ref.done(call); } } catch 
(java.lang.RuntimeException e) { throw e; } catch (java.rmi.RemoteException e) { throw e; } catch (java.rmi.activation.ActivationException e) { throw e; } catch (java.lang.Exception e) { // rmi.0C=Undeclared checked exception throw new java.rmi.UnexpectedException(Messages.getString("rmi.0C"), e); //$NON-NLS-1$ } } }
apache/flink
35,453
flink-table/flink-table-runtime/src/test/java/org/apache/flink/table/runtime/util/ResettableExternalBufferTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.runtime.util; import org.apache.flink.runtime.io.disk.iomanager.IOManager; import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync; import org.apache.flink.runtime.memory.MemoryManager; import org.apache.flink.runtime.memory.MemoryManagerBuilder; import org.apache.flink.table.data.StringData; import org.apache.flink.table.data.binary.BinaryRowData; import org.apache.flink.table.data.writer.BinaryRowWriter; import org.apache.flink.table.runtime.typeutils.BinaryRowDataSerializer; import org.apache.commons.lang3.RandomStringUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; import static org.apache.flink.runtime.memory.MemoryManager.DEFAULT_PAGE_SIZE; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; /** Test for {@link ResettableExternalBuffer}. 
*/ class ResettableExternalBufferTest { private static final int MEMORY_SIZE = 1024 * DEFAULT_PAGE_SIZE; private MemoryManager memManager; private IOManager ioManager; private Random random; private BinaryRowDataSerializer serializer; private BinaryRowDataSerializer multiColumnFixedLengthSerializer; private BinaryRowDataSerializer multiColumnVariableLengthSerializer; @BeforeEach void before() { this.memManager = MemoryManagerBuilder.newBuilder().setMemorySize(MEMORY_SIZE).build(); this.ioManager = new IOManagerAsync(); this.random = new Random(); this.serializer = new BinaryRowDataSerializer(1); this.multiColumnFixedLengthSerializer = new BinaryRowDataSerializer(3); this.multiColumnVariableLengthSerializer = new BinaryRowDataSerializer(5); } private ResettableExternalBuffer newBuffer(long memorySize) { return newBuffer(memorySize, this.serializer, true); } private ResettableExternalBuffer newBuffer( long memorySize, BinaryRowDataSerializer serializer, boolean isRowAllInFixedPart) { return new ResettableExternalBuffer( ioManager, new LazyMemorySegmentPool( this, memManager, (int) (memorySize / memManager.getPageSize())), serializer, isRowAllInFixedPart); } @Test void testLess() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 100; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // repeat read assertBuffer(expected, buffer); buffer.newIterator(); assertBuffer(expected, buffer); buffer.close(); } @Test void testSpill() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; // 16 * 5000 List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // repeat read assertBuffer(expected, buffer); 
buffer.newIterator(); assertBuffer(expected, buffer); buffer.close(); } @Test void testBufferReset() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); // less insertMulti(buffer, 10); buffer.reset(); assertThat(0).isEqualTo(buffer.size()); // not spill List<Long> expected = insertMulti(buffer, 100); assertThat(100).isEqualTo(buffer.size()); assertBuffer(expected, buffer); buffer.reset(); // spill expected = insertMulti(buffer, 2500); assertThat(2500).isEqualTo(buffer.size()); assertBuffer(expected, buffer); buffer.close(); } @Test void testBufferResetWithSpill() throws Exception { int inMemoryThreshold = 20; ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); // spill List<Long> expected = insertMulti(buffer, 2500); assertThat(2500).isEqualTo(buffer.size()); assertBuffer(expected, buffer); buffer.reset(); // spill, but not read the values insertMulti(buffer, 2500); buffer.newIterator(); assertThat(2500).isEqualTo(buffer.size()); buffer.reset(); // not spill expected = insertMulti(buffer, inMemoryThreshold / 2); assertBuffer(expected, buffer); buffer.reset(); assertThat(0).isEqualTo(buffer.size()); // less expected = insertMulti(buffer, 100); assertThat(100).isEqualTo(buffer.size()); assertBuffer(expected, buffer); buffer.reset(); buffer.close(); } @Test void testHugeRecord() throws Exception { try (ResettableExternalBuffer buffer = new ResettableExternalBuffer( ioManager, new LazyMemorySegmentPool( this, memManager, 3 * DEFAULT_PAGE_SIZE / memManager.getPageSize()), new BinaryRowDataSerializer(1), false)) { assertThatThrownBy( () -> { writeHuge(buffer, 50000); writeHuge(buffer, 10); }) .isInstanceOf(IOException.class); } } @Test void testRandomAccessLess() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 100; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); 
assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { assertRandomAccess(expected, buffer, beginPos.get(i)); } buffer.close(); } @Test void testRandomAccessSpill() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { assertRandomAccess(expected, buffer, beginPos.get(i)); } buffer.close(); } @Test void testBufferResetWithSpillAndRandomAccess() throws Exception { final int tries = 100; ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); // spill, random access and reset twice List<Long> expected; for (int i = 0; i < 2; i++) { expected = insertMulti(buffer, 2500); assertThat(2500).isEqualTo(buffer.size()); for (int j = 0; j < tries; j++) { assertRandomAccess(expected, buffer); } buffer.reset(); } // spill, but not read the values insertMulti(buffer, 2500); buffer.newIterator(); assertThat(2500).isEqualTo(buffer.size()); buffer.reset(); // not spill expected = insertMulti(buffer, 10); for (int i = 0; i < tries; i++) { assertRandomAccess(expected, buffer); } buffer.reset(); assertThat(0).isEqualTo(buffer.size()); // less expected = insertMulti(buffer, 100); assertThat(100).isEqualTo(buffer.size()); for (int i = 0; i < tries; i++) { assertRandomAccess(expected, buffer); } buffer.reset(); buffer.close(); } @Test void testMultiColumnFixedLengthRandomAccessLess() throws Exception { testMultiColumnRandomAccessLess( 
multiColumnFixedLengthSerializer, FixedLengthRowData.class, true); } @Test void testMultiColumnFixedLengthRandomAccessSpill() throws Exception { testMultiColumnRandomAccessSpill( multiColumnFixedLengthSerializer, FixedLengthRowData.class, true); } @Test void testBufferResetWithSpillAndMultiColumnFixedLengthRandomAccess() throws Exception { testBufferResetWithSpillAndMultiColumnRandomAccess( multiColumnFixedLengthSerializer, FixedLengthRowData.class, true); } @Test void testMultiColumnVariableLengthRandomAccessLess() throws Exception { testMultiColumnRandomAccessLess( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false); } @Test void testMultiColumnVariableLengthRandomAccessSpill() throws Exception { testMultiColumnRandomAccessSpill( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false); } @Test void testBufferResetWithSpillAndMultiColumnVariableLengthRandomAccess() throws Exception { testBufferResetWithSpillAndMultiColumnRandomAccess( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false); } @Test void testIteratorOnFixedLengthEmptyBuffer() throws Exception { testIteratorOnMultiColumnEmptyBuffer(multiColumnFixedLengthSerializer, true); } @Test void testFixedLengthRandomAccessOutOfRange() throws Exception { testRandomAccessOutOfRange( multiColumnFixedLengthSerializer, FixedLengthRowData.class, true); } @Test void testIteratorOnVariableLengthEmptyBuffer() throws Exception { testIteratorOnMultiColumnEmptyBuffer(multiColumnVariableLengthSerializer, false); } @Test void testVariableLengthRandomAccessOutOfRange() throws Exception { testRandomAccessOutOfRange( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false); } @Test void testIteratorReset() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 100; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); 
assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // reset and read ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(); assertBuffer(expected, iterator); iterator.reset(); assertBuffer(expected, iterator); iterator.close(); buffer.close(); } @Test void testIteratorResetWithSpill() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; // 16 * 5000 List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // reset and read ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(); assertBuffer(expected, iterator); iterator.reset(); assertBuffer(expected, iterator); iterator.close(); buffer.close(); } @Test void testIteratorResetWithRandomAccess() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 100; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { int begin = beginPos.get(i); ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin); assertRandomAccess(expected, iterator, begin); iterator.reset(); assertRandomAccess(expected, iterator, begin); iterator.close(); } buffer.close(); } @Test void testIteratorResetWithRandomAccessSpill() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // repeat random 
access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { int begin = beginPos.get(i); ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin); assertRandomAccess(expected, iterator, begin); iterator.reset(); assertRandomAccess(expected, iterator, begin); iterator.close(); } buffer.close(); } @Test void testMultipleIteratorsLess() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 100; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { int beginIdx = beginPos.get(i); ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(beginIdx); assertRandomAccess(expected, iterator, beginIdx); if (i % 3 == 0) { iterator.close(); } } buffer.close(); } @Test void testMultipleIteratorsSpill() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { int beginIdx = beginPos.get(i); ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(beginIdx); assertRandomAccess(expected, iterator, beginIdx); if (i % 3 == 0) { iterator.close(); } } buffer.close(); } @Test void 
testMultipleIteratorsWithIteratorReset() throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2); int number = 5000; // 16 * 5000 List<Long> expected = insertMulti(buffer, number); assertThat(number).isEqualTo(buffer.size()); assertBuffer(expected, buffer); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // reset and read ResettableExternalBuffer.BufferIterator iterator1 = buffer.newIterator(); assertBuffer(expected, iterator1); iterator1.reset(); assertBuffer(expected, iterator1); ResettableExternalBuffer.BufferIterator iterator2 = buffer.newIterator(); assertBuffer(expected, iterator2); iterator2.reset(); assertBuffer(expected, iterator2); iterator1.reset(); assertBuffer(expected, iterator1); iterator2.reset(); assertBuffer(expected, iterator2); iterator1.close(); iterator2.reset(); assertBuffer(expected, iterator2); iterator2.close(); buffer.close(); } @Test void testUpdateIteratorFixedLengthLess() { assertThatThrownBy( () -> testUpdateIteratorLess( multiColumnFixedLengthSerializer, FixedLengthRowData.class, true)) .isInstanceOf(IllegalStateException.class); } @Test void testUpdateIteratorFixedLengthSpill() { assertThatThrownBy( () -> testUpdateIteratorSpill( multiColumnFixedLengthSerializer, FixedLengthRowData.class, true)) .isInstanceOf(IllegalStateException.class); } @Test void testUpdateIteratorVariableLengthLess() { assertThatThrownBy( () -> testUpdateIteratorLess( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false)) .isInstanceOf(IllegalStateException.class); } @Test void testUpdateIteratorVariableLengthSpill() { assertThatThrownBy( () -> testUpdateIteratorSpill( multiColumnVariableLengthSerializer, VariableLengthRowData.class, false)) .isInstanceOf(IllegalStateException.class); } private <T extends RowData> void testMultiColumnRandomAccessLess( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { ResettableExternalBuffer buffer = 
newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); int number = 30; List<RowData> expected = insertMultiColumn(buffer, number, clazz); assertThat(number).isEqualTo(buffer.size()); assertThat(0).isEqualTo(buffer.getSpillChannels().size()); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { assertMultiColumnRandomAccess(expected, buffer, beginPos.get(i)); } buffer.close(); } private <T extends RowData> void testMultiColumnRandomAccessSpill( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); int number = 4000; List<RowData> expected = insertMultiColumn(buffer, number, clazz); assertThat(number).isEqualTo(buffer.size()); assertThat(buffer.getSpillChannels().size()).isGreaterThan(0); // repeat random access List<Integer> beginPos = new ArrayList<>(); for (int i = 0; i < buffer.size(); i++) { beginPos.add(i); } Collections.shuffle(beginPos); for (int i = 0; i < buffer.size(); i++) { assertMultiColumnRandomAccess(expected, buffer, beginPos.get(i)); } buffer.close(); } private <T extends RowData> void testBufferResetWithSpillAndMultiColumnRandomAccess( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { final int tries = 100; ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); // spill, random access and reset twice List<RowData> expected; for (int i = 0; i < 2; i++) { expected = insertMultiColumn(buffer, 1500, clazz); assertThat(1500).isEqualTo(buffer.size()); for (int j = 0; j < tries; j++) { assertMultiColumnRandomAccess(expected, buffer); } buffer.reset(); } // spill, but not read the values insertMultiColumn(buffer, 1500, clazz); buffer.newIterator(); 
assertThat(1500).isEqualTo(buffer.size()); buffer.reset(); // not spill expected = insertMultiColumn(buffer, 10, clazz); for (int i = 0; i < tries; i++) { assertMultiColumnRandomAccess(expected, buffer); } buffer.reset(); assertThat(0).isEqualTo(buffer.size()); // less expected = insertMultiColumn(buffer, 30, clazz); assertThat(30).isEqualTo(buffer.size()); for (int i = 0; i < tries; i++) { assertMultiColumnRandomAccess(expected, buffer); } buffer.reset(); buffer.close(); } private void testIteratorOnMultiColumnEmptyBuffer( BinaryRowDataSerializer serializer, boolean isRowAllInFixedPart) { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); ResettableExternalBuffer.BufferIterator iterator; buffer.complete(); iterator = buffer.newIterator(0); assertThat(iterator.advanceNext()).isFalse(); iterator = buffer.newIterator(random.nextInt(Integer.MAX_VALUE)); assertThat(iterator.advanceNext()).isFalse(); buffer.close(); } private <T extends RowData> void testRandomAccessOutOfRange( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); int number = 100; List<RowData> expected = insertMultiColumn(buffer, number, clazz); assertThat(number).isEqualTo(buffer.size()); assertMultiColumnRandomAccess(expected, buffer, 0); ResettableExternalBuffer.BufferIterator iterator; iterator = buffer.newIterator(number); assertThat(iterator.advanceNext()).isFalse(); iterator = buffer.newIterator(number + random.nextInt(Integer.MAX_VALUE)); assertThat(iterator.advanceNext()).isFalse(); iterator = buffer.newIterator(random.nextInt(number)); assertThat(iterator.advanceNext()).isTrue(); buffer.close(); } private <T extends RowData> void testUpdateIteratorLess( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { ResettableExternalBuffer buffer = 
newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); int number = 20; int iters = 3; List<RowData> expected = new ArrayList<>(); List<ResettableExternalBuffer.BufferIterator> iterators = new ArrayList<>(); for (int i = 0; i < iters; i++) { iterators.add(buffer.newIterator()); } for (int i = 0; i < number; i++) { RowData data = clazz.newInstance(); data.insertIntoBuffer(buffer); expected.add(data); for (ResettableExternalBuffer.BufferIterator iterator : iterators) { assertThat(iterator.advanceNext()).isTrue(); BinaryRowData row = iterator.getRow(); data.checkSame(row); assertThat(iterator.advanceNext()).isFalse(); } } for (ResettableExternalBuffer.BufferIterator iterator : iterators) { iterator.reset(); } for (int i = 0; i < number; i++) { for (ResettableExternalBuffer.BufferIterator iterator : iterators) { assertThat(iterator.advanceNext()).isTrue(); BinaryRowData row = iterator.getRow(); expected.get(i).checkSame(row); } } for (ResettableExternalBuffer.BufferIterator iterator : iterators) { iterator.close(); } assertMultiColumnRandomAccess(expected, buffer); buffer.close(); } private <T extends RowData> void testUpdateIteratorSpill( BinaryRowDataSerializer serializer, Class<T> clazz, boolean isRowAllInFixedPart) throws Exception { ResettableExternalBuffer buffer = newBuffer(DEFAULT_PAGE_SIZE * 2, serializer, isRowAllInFixedPart); int number = 100; int step = 20; int iters = 3; List<RowData> expected = new ArrayList<>(); List<RowData> smallExpected = new ArrayList<>(); List<ResettableExternalBuffer.BufferIterator> iterators = new ArrayList<>(); for (int i = 0; i < iters; i++) { iterators.add(buffer.newIterator()); } for (int i = 0; i < number; i++) { smallExpected.clear(); for (int j = 0; j < step; j++) { RowData data = clazz.newInstance(); data.insertIntoBuffer(buffer); expected.add(data); smallExpected.add(data); } for (int j = 0; j < step; j++) { for (ResettableExternalBuffer.BufferIterator iterator : iterators) { 
assertThat(iterator.advanceNext()).isTrue(); BinaryRowData row = iterator.getRow(); smallExpected.get(j).checkSame(row); } } for (ResettableExternalBuffer.BufferIterator iterator : iterators) { assertThat(iterator.advanceNext()).isFalse(); } } for (ResettableExternalBuffer.BufferIterator iterator : iterators) { iterator.reset(); } for (int i = 0; i < number * step; i++) { for (ResettableExternalBuffer.BufferIterator iterator : iterators) { assertThat(iterator.advanceNext()).isTrue(); BinaryRowData row = iterator.getRow(); expected.get(i).checkSame(row); } } for (ResettableExternalBuffer.BufferIterator iterator : iterators) { iterator.close(); } assertMultiColumnRandomAccess(expected, buffer); buffer.close(); } private void writeHuge(ResettableExternalBuffer buffer, int size) throws IOException { BinaryRowData row = new BinaryRowData(1); BinaryRowWriter writer = new BinaryRowWriter(row); writer.reset(); writer.writeString(0, StringData.fromString(RandomStringUtils.random(size))); writer.complete(); buffer.add(row); } private void assertBuffer(List<Long> expected, ResettableExternalBuffer buffer) { ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(); assertBuffer(expected, iterator); iterator.close(); } private void assertBuffer( List<Long> expected, ResettableExternalBuffer.BufferIterator iterator) { List<Long> values = new ArrayList<>(); while (iterator.advanceNext()) { values.add(iterator.getRow().getLong(0)); } assertThat(values).isEqualTo(expected); } private List<Long> insertMulti(ResettableExternalBuffer buffer, int cnt) throws IOException { ArrayList<Long> expected = new ArrayList<>(cnt); insertMulti(buffer, cnt, expected); buffer.complete(); return expected; } private void insertMulti(ResettableExternalBuffer buffer, int cnt, List<Long> expected) throws IOException { for (int i = 0; i < cnt; i++) { expected.add(randomInsert(buffer)); } } private long randomInsert(ResettableExternalBuffer buffer) throws IOException { long l = 
random.nextLong(); BinaryRowData row = new BinaryRowData(1); BinaryRowWriter writer = new BinaryRowWriter(row); writer.reset(); writer.writeLong(0, l); writer.complete(); buffer.add(row); return l; } private void assertRandomAccess(List<Long> expected, ResettableExternalBuffer buffer) { int begin = random.nextInt(buffer.size()); assertRandomAccess(expected, buffer, begin); } private void assertRandomAccess( List<Long> expected, ResettableExternalBuffer buffer, int begin) { ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin); assertRandomAccess(expected, iterator, begin); iterator.close(); } private void assertRandomAccess( List<Long> expected, ResettableExternalBuffer.BufferIterator iterator, int begin) { List<Long> values = new ArrayList<>(); while (iterator.advanceNext()) { values.add(iterator.getRow().getLong(0)); } assertThat(values).isEqualTo(expected.subList(begin, expected.size())); } private <T extends RowData> List<RowData> insertMultiColumn( ResettableExternalBuffer buffer, int cnt, Class<T> clazz) throws IOException, IllegalAccessException, InstantiationException { ArrayList<RowData> expected = new ArrayList<>(cnt); insertMultiColumn(buffer, cnt, expected, clazz); buffer.complete(); return expected; } private <T extends RowData> void insertMultiColumn( ResettableExternalBuffer buffer, int cnt, List<RowData> expected, Class<T> clazz) throws IOException, IllegalAccessException, InstantiationException { for (int i = 0; i < cnt; i++) { RowData data = clazz.newInstance(); data.insertIntoBuffer(buffer); expected.add(data); } buffer.complete(); } private void assertMultiColumnRandomAccess( List<RowData> expected, ResettableExternalBuffer buffer) { int begin = random.nextInt(buffer.size()); assertMultiColumnRandomAccess(expected, buffer, begin); } private void assertMultiColumnRandomAccess( List<RowData> expected, ResettableExternalBuffer buffer, int begin) { ResettableExternalBuffer.BufferIterator iterator = buffer.newIterator(begin); 
for (int i = begin; i < buffer.size(); i++) { assertThat(iterator.advanceNext()).isTrue(); expected.get(i).checkSame(iterator.getRow()); } } private interface RowData { void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException; void checkSame(BinaryRowData row); } private static class FixedLengthRowData implements RowData { private final boolean col0; private final long col1; private final int col2; FixedLengthRowData() { Random random = new Random(); col0 = random.nextBoolean(); col1 = random.nextLong(); col2 = random.nextInt(); } @Override public void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException { BinaryRowData row = new BinaryRowData(3); BinaryRowWriter writer = new BinaryRowWriter(row); writer.reset(); writer.writeBoolean(0, col0); writer.writeLong(1, col1); writer.writeInt(2, col2); writer.complete(); buffer.add(row); } @Override public void checkSame(BinaryRowData row) { assertThat(row.getBoolean(0)).isEqualTo(col0); assertThat(row.getLong(1)).isEqualTo(col1); assertThat(row.getInt(2)).isEqualTo(col2); } } private static class VariableLengthRowData implements RowData { private final boolean col0; private final long col1; private final StringData col2; private final int col3; private final StringData col4; public VariableLengthRowData() { Random random = new Random(); col0 = random.nextBoolean(); col1 = random.nextLong(); col2 = StringData.fromString(RandomStringUtils.random(random.nextInt(50) + 1)); col3 = random.nextInt(); col4 = StringData.fromString(RandomStringUtils.random(random.nextInt(50) + 1)); } @Override public void insertIntoBuffer(ResettableExternalBuffer buffer) throws IOException { BinaryRowData row = new BinaryRowData(5); BinaryRowWriter writer = new BinaryRowWriter(row); writer.reset(); writer.writeBoolean(0, col0); writer.writeLong(1, col1); writer.writeString(2, col2); writer.writeInt(3, col3); writer.writeString(4, col4); writer.complete(); buffer.add(row); } @Override public void 
checkSame(BinaryRowData row) { assertThat(row.getBoolean(0)).isEqualTo(col0); assertThat(row.getLong(1)).isEqualTo(col1); assertThat(row.getString(2)).isEqualTo(col2); assertThat(row.getInt(3)).isEqualTo(col3); assertThat(row.getString(4)).isEqualTo(col4); } } }
googleapis/google-cloud-java
35,230
java-functions/proto-google-cloud-functions-v2alpha/src/main/java/com/google/cloud/functions/v2alpha/StorageSource.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/functions/v2alpha/functions.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.functions.v2alpha; /** * * * <pre> * Location of the source in an archive file in Google Cloud Storage. * </pre> * * Protobuf type {@code google.cloud.functions.v2alpha.StorageSource} */ public final class StorageSource extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.functions.v2alpha.StorageSource) StorageSourceOrBuilder { private static final long serialVersionUID = 0L; // Use StorageSource.newBuilder() to construct. 
private StorageSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private StorageSource() { bucket_ = ""; object_ = ""; sourceUploadUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new StorageSource(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_StorageSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_StorageSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2alpha.StorageSource.class, com.google.cloud.functions.v2alpha.StorageSource.Builder.class); } public static final int BUCKET_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object bucket_ = ""; /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bucket. */ @java.lang.Override public java.lang.String getBucket() { java.lang.Object ref = bucket_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bucket_ = s; return s; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bytes for bucket. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBucketBytes() { java.lang.Object ref = bucket_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bucket_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OBJECT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object object_ = ""; /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The object. */ @java.lang.Override public java.lang.String getObject() { java.lang.Object ref = object_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); object_ = s; return s; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The bytes for object. */ @java.lang.Override public com.google.protobuf.ByteString getObjectBytes() { java.lang.Object ref = object_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); object_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int GENERATION_FIELD_NUMBER = 3; private long generation_ = 0L; /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return The generation. 
*/ @java.lang.Override public long getGeneration() { return generation_; } public static final int SOURCE_UPLOAD_URL_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object sourceUploadUrl_ = ""; /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The sourceUploadUrl. */ @java.lang.Override public java.lang.String getSourceUploadUrl() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceUploadUrl_ = s; return s; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The bytes for sourceUploadUrl. 
*/ @java.lang.Override public com.google.protobuf.ByteString getSourceUploadUrlBytes() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceUploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, bucket_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, object_); } if (generation_ != 0L) { output.writeInt64(3, generation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, sourceUploadUrl_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(bucket_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, bucket_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(object_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, object_); } if (generation_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, generation_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceUploadUrl_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, sourceUploadUrl_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.functions.v2alpha.StorageSource)) { return super.equals(obj); } com.google.cloud.functions.v2alpha.StorageSource other = (com.google.cloud.functions.v2alpha.StorageSource) obj; if (!getBucket().equals(other.getBucket())) return false; if (!getObject().equals(other.getObject())) return false; if (getGeneration() != other.getGeneration()) return false; if (!getSourceUploadUrl().equals(other.getSourceUploadUrl())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BUCKET_FIELD_NUMBER; hash = (53 * hash) + getBucket().hashCode(); hash = (37 * hash) + OBJECT_FIELD_NUMBER; hash = (53 * hash) + getObject().hashCode(); hash = (37 * hash) + GENERATION_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getGeneration()); hash = (37 * hash) + SOURCE_UPLOAD_URL_FIELD_NUMBER; hash = (53 * hash) + getSourceUploadUrl().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2alpha.StorageSource parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.StorageSource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.functions.v2alpha.StorageSource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.functions.v2alpha.StorageSource prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Location of the source in an archive file in Google Cloud Storage. 
* </pre> * * Protobuf type {@code google.cloud.functions.v2alpha.StorageSource} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.functions.v2alpha.StorageSource) com.google.cloud.functions.v2alpha.StorageSourceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_StorageSource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_StorageSource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.functions.v2alpha.StorageSource.class, com.google.cloud.functions.v2alpha.StorageSource.Builder.class); } // Construct using com.google.cloud.functions.v2alpha.StorageSource.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; bucket_ = ""; object_ = ""; generation_ = 0L; sourceUploadUrl_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.functions.v2alpha.FunctionsProto .internal_static_google_cloud_functions_v2alpha_StorageSource_descriptor; } @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSource getDefaultInstanceForType() { return com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance(); } @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSource build() { com.google.cloud.functions.v2alpha.StorageSource result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return 
result; } @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSource buildPartial() { com.google.cloud.functions.v2alpha.StorageSource result = new com.google.cloud.functions.v2alpha.StorageSource(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.functions.v2alpha.StorageSource result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.bucket_ = bucket_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.object_ = object_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.generation_ = generation_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.sourceUploadUrl_ = sourceUploadUrl_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.functions.v2alpha.StorageSource) { return mergeFrom((com.google.cloud.functions.v2alpha.StorageSource) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.functions.v2alpha.StorageSource other) { if (other 
== com.google.cloud.functions.v2alpha.StorageSource.getDefaultInstance()) return this; if (!other.getBucket().isEmpty()) { bucket_ = other.bucket_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getObject().isEmpty()) { object_ = other.object_; bitField0_ |= 0x00000002; onChanged(); } if (other.getGeneration() != 0L) { setGeneration(other.getGeneration()); } if (!other.getSourceUploadUrl().isEmpty()) { sourceUploadUrl_ = other.sourceUploadUrl_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { bucket_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { object_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { generation_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { sourceUploadUrl_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object bucket_ = ""; /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 
* </pre> * * <code>string bucket = 1;</code> * * @return The bucket. */ public java.lang.String getBucket() { java.lang.Object ref = bucket_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); bucket_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return The bytes for bucket. */ public com.google.protobuf.ByteString getBucketBytes() { java.lang.Object ref = bucket_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); bucket_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @param value The bucket to set. * @return This builder for chaining. */ public Builder setBucket(java.lang.String value) { if (value == null) { throw new NullPointerException(); } bucket_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). * </pre> * * <code>string bucket = 1;</code> * * @return This builder for chaining. */ public Builder clearBucket() { bucket_ = getDefaultInstance().getBucket(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Google Cloud Storage bucket containing the source (see * [Bucket Name * Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). 
* </pre> * * <code>string bucket = 1;</code> * * @param value The bytes for bucket to set. * @return This builder for chaining. */ public Builder setBucketBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); bucket_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object object_ = ""; /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The object. */ public java.lang.String getObject() { java.lang.Object ref = object_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); object_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return The bytes for object. */ public com.google.protobuf.ByteString getObjectBytes() { java.lang.Object ref = object_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); object_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @param value The object to set. * @return This builder for chaining. */ public Builder setObject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } object_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage object containing the source. 
* * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @return This builder for chaining. */ public Builder clearObject() { object_ = getDefaultInstance().getObject(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Google Cloud Storage object containing the source. * * This object must be a gzipped archive file (`.tar.gz`) containing source to * build. * </pre> * * <code>string object = 2;</code> * * @param value The bytes for object to set. * @return This builder for chaining. */ public Builder setObjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); object_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private long generation_; /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return The generation. */ @java.lang.Override public long getGeneration() { return generation_; } /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @param value The generation to set. * @return This builder for chaining. */ public Builder setGeneration(long value) { generation_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Google Cloud Storage generation for the object. If the generation is * omitted, the latest generation will be used. * </pre> * * <code>int64 generation = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearGeneration() { bitField0_ = (bitField0_ & ~0x00000004); generation_ = 0L; onChanged(); return this; } private java.lang.Object sourceUploadUrl_ = ""; /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The sourceUploadUrl. */ public java.lang.String getSourceUploadUrl() { java.lang.Object ref = sourceUploadUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); sourceUploadUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return The bytes for sourceUploadUrl. */ public com.google.protobuf.ByteString getSourceUploadUrlBytes() { java.lang.Object ref = sourceUploadUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); sourceUploadUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @param value The sourceUploadUrl to set. * @return This builder for chaining. 
*/ public Builder setSourceUploadUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } sourceUploadUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @return This builder for chaining. */ public Builder clearSourceUploadUrl() { sourceUploadUrl_ = getDefaultInstance().getSourceUploadUrl(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * When the specified storage bucket is a 1st gen function uploard url bucket, * this field should be set as the generated upload url for 1st gen * deployment. * </pre> * * <code>string source_upload_url = 4;</code> * * @param value The bytes for sourceUploadUrl to set. * @return This builder for chaining. */ public Builder setSourceUploadUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); sourceUploadUrl_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.functions.v2alpha.StorageSource) } // @@protoc_insertion_point(class_scope:google.cloud.functions.v2alpha.StorageSource) private static final com.google.cloud.functions.v2alpha.StorageSource DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.functions.v2alpha.StorageSource(); } public static com.google.cloud.functions.v2alpha.StorageSource getDefaultInstance() { return 
DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<StorageSource> PARSER = new com.google.protobuf.AbstractParser<StorageSource>() { @java.lang.Override public StorageSource parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<StorageSource> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<StorageSource> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.functions.v2alpha.StorageSource getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,483
java-shopping-merchant-reviews/google-shopping-merchant-reviews/src/main/java/com/google/shopping/merchant/reviews/v1beta/MerchantReviewsServiceClient.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.reviews.v1beta; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutures; import com.google.api.core.BetaApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.paging.AbstractFixedSizeCollection; import com.google.api.gax.paging.AbstractPage; import com.google.api.gax.paging.AbstractPagedListResponse; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.UnaryCallable; import com.google.common.util.concurrent.MoreExecutors; import com.google.protobuf.Empty; import com.google.shopping.merchant.reviews.v1beta.stub.MerchantReviewsServiceStub; import com.google.shopping.merchant.reviews.v1beta.stub.MerchantReviewsServiceStubSettings; import java.io.IOException; import java.util.List; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Service Description: Service to manage merchant reviews. * * <p>This class provides the ability to make remote calls to the backing service through method * calls that map to API methods. Sample code to get started: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * MerchantReviewName name = MerchantReviewName.of("[ACCOUNT]", "[NAME]"); * MerchantReview response = merchantReviewsServiceClient.getMerchantReview(name); * } * }</pre> * * <p>Note: close() needs to be called on the MerchantReviewsServiceClient object to clean up * resources such as threads. In the example above, try-with-resources is used, which automatically * calls close(). * * <table> * <caption>Methods</caption> * <tr> * <th>Method</th> * <th>Description</th> * <th>Method Variants</th> * </tr> * <tr> * <td><p> GetMerchantReview</td> * <td><p> Gets a merchant review.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> getMerchantReview(GetMerchantReviewRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> getMerchantReview(MerchantReviewName name) * <li><p> getMerchantReview(String name) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> getMerchantReviewCallable() * </ul> * </td> * </tr> * <tr> * <td><p> ListMerchantReviews</td> * <td><p> Lists merchant reviews.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> listMerchantReviews(ListMerchantReviewsRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple 
ways to call the same method.</p> * <ul> * <li><p> listMerchantReviews(AccountName parent) * <li><p> listMerchantReviews(String parent) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> listMerchantReviewsPagedCallable() * <li><p> listMerchantReviewsCallable() * </ul> * </td> * </tr> * <tr> * <td><p> InsertMerchantReview</td> * <td><p> Inserts a review for your Merchant Center account. If the review already exists, then the review is replaced with the new instance.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> insertMerchantReview(InsertMerchantReviewRequest request) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> insertMerchantReviewCallable() * </ul> * </td> * </tr> * <tr> * <td><p> DeleteMerchantReview</td> * <td><p> Deletes merchant review.</td> * <td> * <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p> * <ul> * <li><p> deleteMerchantReview(DeleteMerchantReviewRequest request) * </ul> * <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p> * <ul> * <li><p> deleteMerchantReview(MerchantReviewName name) * <li><p> deleteMerchantReview(String name) * </ul> * <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p> * <ul> * <li><p> deleteMerchantReviewCallable() * </ul> * </td> * </tr> * </table> * * <p>See the individual methods for example code. * * <p>Many parameters require resource names to be formatted in a particular way. 
To assist with * these names, this class includes a format method for each type of name, and additionally a parse * method to extract the individual identifiers contained within names that are returned. * * <p>This class can be customized by passing in a custom instance of MerchantReviewsServiceSettings * to create(). For example: * * <p>To customize credentials: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MerchantReviewsServiceSettings merchantReviewsServiceSettings = * MerchantReviewsServiceSettings.newBuilder() * .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials)) * .build(); * MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create(merchantReviewsServiceSettings); * }</pre> * * <p>To customize the endpoint: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MerchantReviewsServiceSettings merchantReviewsServiceSettings = * MerchantReviewsServiceSettings.newBuilder().setEndpoint(myEndpoint).build(); * MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create(merchantReviewsServiceSettings); * }</pre> * * <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over * the wire: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * MerchantReviewsServiceSettings merchantReviewsServiceSettings = * MerchantReviewsServiceSettings.newHttpJsonBuilder().build(); * MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create(merchantReviewsServiceSettings); * }</pre> * * <p>Please refer to the GitHub repository's samples for more quickstart code snippets. */ @BetaApi @Generated("by gapic-generator-java") public class MerchantReviewsServiceClient implements BackgroundResource { private final MerchantReviewsServiceSettings settings; private final MerchantReviewsServiceStub stub; /** Constructs an instance of MerchantReviewsServiceClient with default settings. */ public static final MerchantReviewsServiceClient create() throws IOException { return create(MerchantReviewsServiceSettings.newBuilder().build()); } /** * Constructs an instance of MerchantReviewsServiceClient, using the given settings. 
The channels * are created based on the settings passed in, or defaults for any settings that are not set. */ public static final MerchantReviewsServiceClient create(MerchantReviewsServiceSettings settings) throws IOException { return new MerchantReviewsServiceClient(settings); } /** * Constructs an instance of MerchantReviewsServiceClient, using the given stub for making calls. * This is for advanced usage - prefer using create(MerchantReviewsServiceSettings). */ public static final MerchantReviewsServiceClient create(MerchantReviewsServiceStub stub) { return new MerchantReviewsServiceClient(stub); } /** * Constructs an instance of MerchantReviewsServiceClient, using the given settings. This is * protected so that it is easy to make a subclass, but otherwise, the static factory methods * should be preferred. */ protected MerchantReviewsServiceClient(MerchantReviewsServiceSettings settings) throws IOException { this.settings = settings; this.stub = ((MerchantReviewsServiceStubSettings) settings.getStubSettings()).createStub(); } protected MerchantReviewsServiceClient(MerchantReviewsServiceStub stub) { this.settings = null; this.stub = stub; } public final MerchantReviewsServiceSettings getSettings() { return settings; } public MerchantReviewsServiceStub getStub() { return stub; } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * MerchantReviewName name = MerchantReviewName.of("[ACCOUNT]", "[NAME]"); * MerchantReview response = merchantReviewsServiceClient.getMerchantReview(name); * } * }</pre> * * @param name Required. The ID of the merchant review. Format: * accounts/{account}/merchantReviews/{merchantReview} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final MerchantReview getMerchantReview(MerchantReviewName name) { GetMerchantReviewRequest request = GetMerchantReviewRequest.newBuilder() .setName(name == null ? null : name.toString()) .build(); return getMerchantReview(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * String name = MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString(); * MerchantReview response = merchantReviewsServiceClient.getMerchantReview(name); * } * }</pre> * * @param name Required. The ID of the merchant review. 
Format: * accounts/{account}/merchantReviews/{merchantReview} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final MerchantReview getMerchantReview(String name) { GetMerchantReviewRequest request = GetMerchantReviewRequest.newBuilder().setName(name).build(); return getMerchantReview(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * GetMerchantReviewRequest request = * GetMerchantReviewRequest.newBuilder() * .setName(MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString()) * .build(); * MerchantReview response = merchantReviewsServiceClient.getMerchantReview(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final MerchantReview getMerchantReview(GetMerchantReviewRequest request) { return getMerchantReviewCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Gets a merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * GetMerchantReviewRequest request = * GetMerchantReviewRequest.newBuilder() * .setName(MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString()) * .build(); * ApiFuture<MerchantReview> future = * merchantReviewsServiceClient.getMerchantReviewCallable().futureCall(request); * // Do something. * MerchantReview response = future.get(); * } * }</pre> */ public final UnaryCallable<GetMerchantReviewRequest, MerchantReview> getMerchantReviewCallable() { return stub.getMerchantReviewCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists merchant reviews. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * AccountName parent = AccountName.of("[ACCOUNT]"); * for (MerchantReview element : * merchantReviewsServiceClient.listMerchantReviews(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The account to list merchant reviews for. Format: accounts/{account} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListMerchantReviewsPagedResponse listMerchantReviews(AccountName parent) { ListMerchantReviewsRequest request = ListMerchantReviewsRequest.newBuilder() .setParent(parent == null ? 
null : parent.toString()) .build(); return listMerchantReviews(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists merchant reviews. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * String parent = AccountName.of("[ACCOUNT]").toString(); * for (MerchantReview element : * merchantReviewsServiceClient.listMerchantReviews(parent).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param parent Required. The account to list merchant reviews for. Format: accounts/{account} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListMerchantReviewsPagedResponse listMerchantReviews(String parent) { ListMerchantReviewsRequest request = ListMerchantReviewsRequest.newBuilder().setParent(parent).build(); return listMerchantReviews(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists merchant reviews. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * ListMerchantReviewsRequest request = * ListMerchantReviewsRequest.newBuilder() * .setParent(AccountName.of("[ACCOUNT]").toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * for (MerchantReview element : * merchantReviewsServiceClient.listMerchantReviews(request).iterateAll()) { * // doThingsWith(element); * } * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final ListMerchantReviewsPagedResponse listMerchantReviews( ListMerchantReviewsRequest request) { return listMerchantReviewsPagedCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists merchant reviews. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * ListMerchantReviewsRequest request = * ListMerchantReviewsRequest.newBuilder() * .setParent(AccountName.of("[ACCOUNT]").toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * ApiFuture<MerchantReview> future = * merchantReviewsServiceClient.listMerchantReviewsPagedCallable().futureCall(request); * // Do something. 
* for (MerchantReview element : future.get().iterateAll()) { * // doThingsWith(element); * } * } * }</pre> */ public final UnaryCallable<ListMerchantReviewsRequest, ListMerchantReviewsPagedResponse> listMerchantReviewsPagedCallable() { return stub.listMerchantReviewsPagedCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Lists merchant reviews. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * ListMerchantReviewsRequest request = * ListMerchantReviewsRequest.newBuilder() * .setParent(AccountName.of("[ACCOUNT]").toString()) * .setPageSize(883849137) * .setPageToken("pageToken873572522") * .build(); * while (true) { * ListMerchantReviewsResponse response = * merchantReviewsServiceClient.listMerchantReviewsCallable().call(request); * for (MerchantReview element : response.getMerchantReviewsList()) { * // doThingsWith(element); * } * String nextPageToken = response.getNextPageToken(); * if (!Strings.isNullOrEmpty(nextPageToken)) { * request = request.toBuilder().setPageToken(nextPageToken).build(); * } else { * break; * } * } * } * }</pre> */ public final UnaryCallable<ListMerchantReviewsRequest, ListMerchantReviewsResponse> listMerchantReviewsCallable() { return stub.listMerchantReviewsCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Inserts a review for your Merchant Center account. If the review already exists, then the * review is replaced with the new instance. 
* * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * InsertMerchantReviewRequest request = * InsertMerchantReviewRequest.newBuilder() * .setParent("parent-995424086") * .setMerchantReview(MerchantReview.newBuilder().build()) * .setDataSource("dataSource1272470629") * .build(); * MerchantReview response = merchantReviewsServiceClient.insertMerchantReview(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final MerchantReview insertMerchantReview(InsertMerchantReviewRequest request) { return insertMerchantReviewCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Inserts a review for your Merchant Center account. If the review already exists, then the * review is replaced with the new instance. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * InsertMerchantReviewRequest request = * InsertMerchantReviewRequest.newBuilder() * .setParent("parent-995424086") * .setMerchantReview(MerchantReview.newBuilder().build()) * .setDataSource("dataSource1272470629") * .build(); * ApiFuture<MerchantReview> future = * merchantReviewsServiceClient.insertMerchantReviewCallable().futureCall(request); * // Do something. * MerchantReview response = future.get(); * } * }</pre> */ public final UnaryCallable<InsertMerchantReviewRequest, MerchantReview> insertMerchantReviewCallable() { return stub.insertMerchantReviewCallable(); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * MerchantReviewName name = MerchantReviewName.of("[ACCOUNT]", "[NAME]"); * merchantReviewsServiceClient.deleteMerchantReview(name); * } * }</pre> * * @param name Required. The ID of the merchant review. 
Format: * accounts/{account}/merchantReviews/{merchantReview} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteMerchantReview(MerchantReviewName name) { DeleteMerchantReviewRequest request = DeleteMerchantReviewRequest.newBuilder() .setName(name == null ? null : name.toString()) .build(); deleteMerchantReview(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * String name = MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString(); * merchantReviewsServiceClient.deleteMerchantReview(name); * } * }</pre> * * @param name Required. The ID of the merchant review. Format: * accounts/{account}/merchantReviews/{merchantReview} * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteMerchantReview(String name) { DeleteMerchantReviewRequest request = DeleteMerchantReviewRequest.newBuilder().setName(name).build(); deleteMerchantReview(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * DeleteMerchantReviewRequest request = * DeleteMerchantReviewRequest.newBuilder() * .setName(MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString()) * .build(); * merchantReviewsServiceClient.deleteMerchantReview(request); * } * }</pre> * * @param request The request object containing all of the parameters for the API call. * @throws com.google.api.gax.rpc.ApiException if the remote call fails */ public final void deleteMerchantReview(DeleteMerchantReviewRequest request) { deleteMerchantReviewCallable().call(request); } // AUTO-GENERATED DOCUMENTATION AND METHOD. /** * Deletes merchant review. * * <p>Sample code: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * try (MerchantReviewsServiceClient merchantReviewsServiceClient = * MerchantReviewsServiceClient.create()) { * DeleteMerchantReviewRequest request = * DeleteMerchantReviewRequest.newBuilder() * .setName(MerchantReviewName.of("[ACCOUNT]", "[NAME]").toString()) * .build(); * ApiFuture<Empty> future = * merchantReviewsServiceClient.deleteMerchantReviewCallable().futureCall(request); * // Do something. 
* future.get(); * } * }</pre> */ public final UnaryCallable<DeleteMerchantReviewRequest, Empty> deleteMerchantReviewCallable() { return stub.deleteMerchantReviewCallable(); } @Override public final void close() { stub.close(); } @Override public void shutdown() { stub.shutdown(); } @Override public boolean isShutdown() { return stub.isShutdown(); } @Override public boolean isTerminated() { return stub.isTerminated(); } @Override public void shutdownNow() { stub.shutdownNow(); } @Override public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return stub.awaitTermination(duration, unit); } public static class ListMerchantReviewsPagedResponse extends AbstractPagedListResponse< ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview, ListMerchantReviewsPage, ListMerchantReviewsFixedSizeCollection> { public static ApiFuture<ListMerchantReviewsPagedResponse> createAsync( PageContext<ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview> context, ApiFuture<ListMerchantReviewsResponse> futureResponse) { ApiFuture<ListMerchantReviewsPage> futurePage = ListMerchantReviewsPage.createEmptyPage().createPageAsync(context, futureResponse); return ApiFutures.transform( futurePage, input -> new ListMerchantReviewsPagedResponse(input), MoreExecutors.directExecutor()); } private ListMerchantReviewsPagedResponse(ListMerchantReviewsPage page) { super(page, ListMerchantReviewsFixedSizeCollection.createEmptyCollection()); } } public static class ListMerchantReviewsPage extends AbstractPage< ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview, ListMerchantReviewsPage> { private ListMerchantReviewsPage( PageContext<ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview> context, ListMerchantReviewsResponse response) { super(context, response); } private static ListMerchantReviewsPage createEmptyPage() { return new ListMerchantReviewsPage(null, null); } @Override protected 
ListMerchantReviewsPage createPage( PageContext<ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview> context, ListMerchantReviewsResponse response) { return new ListMerchantReviewsPage(context, response); } @Override public ApiFuture<ListMerchantReviewsPage> createPageAsync( PageContext<ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview> context, ApiFuture<ListMerchantReviewsResponse> futureResponse) { return super.createPageAsync(context, futureResponse); } } public static class ListMerchantReviewsFixedSizeCollection extends AbstractFixedSizeCollection< ListMerchantReviewsRequest, ListMerchantReviewsResponse, MerchantReview, ListMerchantReviewsPage, ListMerchantReviewsFixedSizeCollection> { private ListMerchantReviewsFixedSizeCollection( List<ListMerchantReviewsPage> pages, int collectionSize) { super(pages, collectionSize); } private static ListMerchantReviewsFixedSizeCollection createEmptyCollection() { return new ListMerchantReviewsFixedSizeCollection(null, 0); } @Override protected ListMerchantReviewsFixedSizeCollection createCollection( List<ListMerchantReviewsPage> pages, int collectionSize) { return new ListMerchantReviewsFixedSizeCollection(pages, collectionSize); } } }
apache/paimon
35,326
paimon-core/src/test/java/org/apache/paimon/table/source/DataSplitCompatibleTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.paimon.table.source; import org.apache.paimon.data.BinaryRow; import org.apache.paimon.data.BinaryRowWriter; import org.apache.paimon.data.BinaryString; import org.apache.paimon.data.Timestamp; import org.apache.paimon.io.DataFileMeta; import org.apache.paimon.io.DataFileTestDataGenerator; import org.apache.paimon.io.DataInputDeserializer; import org.apache.paimon.io.DataOutputViewStreamWrapper; import org.apache.paimon.manifest.FileSource; import org.apache.paimon.stats.SimpleStats; import org.apache.paimon.stats.SimpleStatsEvolutions; import org.apache.paimon.types.BigIntType; import org.apache.paimon.types.DataField; import org.apache.paimon.types.DoubleType; import org.apache.paimon.types.FloatType; import org.apache.paimon.types.IntType; import org.apache.paimon.types.SmallIntType; import org.apache.paimon.types.TimestampType; import org.apache.paimon.utils.IOUtils; import org.apache.paimon.utils.InstantiationUtil; import org.junit.jupiter.api.Test; import javax.annotation.Nullable; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import 
java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ThreadLocalRandom; import static org.apache.paimon.data.BinaryArray.fromLongArray; import static org.apache.paimon.data.BinaryRow.singleColumn; import static org.assertj.core.api.Assertions.assertThat; /** Test for {@link DataSplit}. */ public class DataSplitCompatibleTest { @Test public void testSplitMergedRowCount() { // not rawConvertible List<DataFileMeta> dataFiles = Arrays.asList(newDataFile(1000L), newDataFile(2000L), newDataFile(3000L)); DataSplit split = newDataSplit(false, dataFiles, null); assertThat(split.partialMergedRowCount()).isEqualTo(0L); assertThat(split.mergedRowCountAvailable()).isEqualTo(false); // rawConvertible without deletion files split = newDataSplit(true, dataFiles, null); assertThat(split.partialMergedRowCount()).isEqualTo(6000L); assertThat(split.mergedRowCountAvailable()).isEqualTo(true); assertThat(split.mergedRowCount()).isEqualTo(6000L); // rawConvertible with deletion files without cardinality ArrayList<DeletionFile> deletionFiles = new ArrayList<>(); deletionFiles.add(null); deletionFiles.add(new DeletionFile("p", 1, 2, null)); deletionFiles.add(new DeletionFile("p", 1, 2, 100L)); split = newDataSplit(true, dataFiles, deletionFiles); assertThat(split.partialMergedRowCount()).isEqualTo(3900L); assertThat(split.mergedRowCountAvailable()).isEqualTo(false); // rawConvertible with deletion files with cardinality deletionFiles = new ArrayList<>(); deletionFiles.add(null); deletionFiles.add(new DeletionFile("p", 1, 2, 200L)); deletionFiles.add(new DeletionFile("p", 1, 2, 100L)); split = newDataSplit(true, dataFiles, deletionFiles); assertThat(split.partialMergedRowCount()).isEqualTo(5700L); assertThat(split.mergedRowCountAvailable()).isEqualTo(true); assertThat(split.mergedRowCount()).isEqualTo(5700L); } @Test public void testSplitMinMaxValue() { Map<Long, List<DataField>> schemas = new HashMap<>(); Timestamp minTs = 
Timestamp.fromLocalDateTime(LocalDateTime.parse("2025-01-01T00:00:00")); Timestamp maxTs1 = Timestamp.fromLocalDateTime(LocalDateTime.parse("2025-03-01T00:00:00")); Timestamp maxTs2 = Timestamp.fromLocalDateTime(LocalDateTime.parse("2025-03-12T00:00:00")); BinaryRow min1 = newBinaryRow(new Object[] {10, 123L, 888.0D, minTs}); BinaryRow max1 = newBinaryRow(new Object[] {99, 456L, 999.0D, maxTs1}); SimpleStats valueStats1 = new SimpleStats(min1, max1, fromLongArray(new Long[] {0L})); BinaryRow min2 = newBinaryRow(new Object[] {5, 0L, 777.0D, minTs}); BinaryRow max2 = newBinaryRow(new Object[] {90, 789L, 899.0D, maxTs2}); SimpleStats valueStats2 = new SimpleStats(min2, max2, fromLongArray(new Long[] {0L})); // test the common case. DataFileMeta d1 = newDataFile(100, valueStats1, null); DataFileMeta d2 = newDataFile(100, valueStats2, null); DataSplit split1 = newDataSplit(true, Arrays.asList(d1, d2), null); DataField intField = new DataField(0, "c_int", new IntType()); DataField longField = new DataField(1, "c_long", new BigIntType()); DataField doubleField = new DataField(2, "c_double", new DoubleType()); DataField tsField = new DataField(3, "c_ts", new TimestampType()); schemas.put(1L, Arrays.asList(intField, longField, doubleField, tsField)); SimpleStatsEvolutions evolutions = new SimpleStatsEvolutions(schemas::get, 1); assertThat(split1.minValue(0, intField, evolutions)).isEqualTo(5); assertThat(split1.maxValue(0, intField, evolutions)).isEqualTo(99); assertThat(split1.minValue(1, longField, evolutions)).isEqualTo(0L); assertThat(split1.maxValue(1, longField, evolutions)).isEqualTo(789L); assertThat(split1.minValue(2, doubleField, evolutions)).isEqualTo(777D); assertThat(split1.maxValue(2, doubleField, evolutions)).isEqualTo(999D); assertThat(split1.minValue(3, tsField, evolutions)).isEqualTo(minTs); assertThat(split1.maxValue(3, tsField, evolutions)).isEqualTo(maxTs2); // test the case which provide non-null valueStatsCol and there are different between file // 
schema and table schema. BinaryRow min3 = newBinaryRow(new Object[] {10, 123L, minTs}); BinaryRow max3 = newBinaryRow(new Object[] {99, 456L, maxTs1}); SimpleStats valueStats3 = new SimpleStats(min3, max3, fromLongArray(new Long[] {0L})); BinaryRow min4 = newBinaryRow(new Object[] {5, 0L, minTs}); BinaryRow max4 = newBinaryRow(new Object[] {90, 789L, maxTs2}); SimpleStats valueStats4 = new SimpleStats(min4, max4, fromLongArray(new Long[] {0L})); List<String> valueStatsCols2 = Arrays.asList("c_int", "c_long", "c_ts"); DataFileMeta d3 = newDataFile(100, valueStats3, valueStatsCols2); DataFileMeta d4 = newDataFile(100, valueStats4, valueStatsCols2); DataSplit split2 = newDataSplit(true, Arrays.asList(d3, d4), null); DataField smallField = new DataField(4, "c_small", new SmallIntType()); DataField floatField = new DataField(5, "c_float", new FloatType()); schemas.put(2L, Arrays.asList(intField, smallField, tsField, floatField)); evolutions = new SimpleStatsEvolutions(schemas::get, 2); assertThat(split2.minValue(0, intField, evolutions)).isEqualTo(5); assertThat(split2.maxValue(0, intField, evolutions)).isEqualTo(99); assertThat(split2.minValue(1, smallField, evolutions)).isEqualTo(null); assertThat(split2.maxValue(1, smallField, evolutions)).isEqualTo(null); assertThat(split2.minValue(2, tsField, evolutions)).isEqualTo(minTs); assertThat(split2.maxValue(2, tsField, evolutions)).isEqualTo(maxTs2); assertThat(split2.minValue(3, floatField, evolutions)).isEqualTo(null); assertThat(split2.maxValue(3, floatField, evolutions)).isEqualTo(null); } @Test public void testSplitNullCount() { Map<Long, List<DataField>> schemas = new HashMap<>(); DataField intField = new DataField(0, "c_int", new IntType()); DataField longField = new DataField(1, "c_long", new BigIntType()); schemas.put(1L, Arrays.asList(intField, longField)); // test common BinaryRow min1 = newBinaryRow(new Object[] {10, 123L}); BinaryRow max1 = newBinaryRow(new Object[] {99, 456L}); SimpleStats valueStats1 = new 
SimpleStats(min1, max1, fromLongArray(new Long[] {5L, 1L})); BinaryRow min2 = newBinaryRow(new Object[] {5, 0L}); BinaryRow max2 = newBinaryRow(new Object[] {90, 789L}); SimpleStats valueStats2 = new SimpleStats(min2, max2, fromLongArray(new Long[] {3L, 2L})); DataFileMeta d1 = newDataFile(100, valueStats1, null); DataFileMeta d2 = newDataFile(100, valueStats2, null); DataSplit split1 = newDataSplit(true, Arrays.asList(d1, d2), null); SimpleStatsEvolutions evolutions = new SimpleStatsEvolutions(schemas::get, 1); assertThat(split1.nullCount(0, evolutions)).isEqualTo(8); assertThat(split1.nullCount(1, evolutions)).isEqualTo(3); // test schema evolution DataField doubleField = new DataField(2, "c_double", new DoubleType()); schemas.put(2L, Arrays.asList(intField, longField, doubleField)); evolutions = new SimpleStatsEvolutions(schemas::get, 2); assertThat(split1.nullCount(0, evolutions)).isEqualTo(8); assertThat(split1.nullCount(1, evolutions)).isEqualTo(3); assertThat(split1.nullCount(2, evolutions)).isEqualTo(200); } @Test public void testSerializer() throws IOException { DataFileTestDataGenerator gen = DataFileTestDataGenerator.builder().build(); DataFileTestDataGenerator.Data data = gen.next(); List<DataFileMeta> files = new ArrayList<>(); for (int i = 0; i < ThreadLocalRandom.current().nextInt(10); i++) { files.add(gen.next().meta); } DataSplit split = DataSplit.builder() .withSnapshot(ThreadLocalRandom.current().nextLong(100)) .withPartition(data.partition) .withBucket(data.bucket) .withDataFiles(files) .withBucketPath("my path") .build(); ByteArrayOutputStream out = new ByteArrayOutputStream(); split.serialize(new DataOutputViewStreamWrapper(out)); DataSplit newSplit = DataSplit.deserialize(new DataInputDeserializer(out.toByteArray())); assertThat(newSplit).isEqualTo(split); } @Test public void testSerializerCompatibleV1() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] 
{0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, null, null, null, null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withDataFiles(dataFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v2Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v1"), true); DataSplit actual = InstantiationUtil.deserializeObject(v2Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV2() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, null, null, null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); BinaryRow partition = new 
BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withDataFiles(dataFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v2Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v2"), true); DataSplit actual = InstantiationUtil.deserializeObject(v2Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV3() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), null, null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, null); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); 
assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v2Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v3"), true); DataSplit actual = InstantiationUtil.deserializeObject(v2Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV4() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), null, null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, 33L); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v4Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v4"), true); DataSplit actual = InstantiationUtil.deserializeObject(v4Bytes, DataSplit.class.getClassLoader()); 
assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV5() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), "hdfs:///path/to/warehouse", null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, 33L); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v5Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v5"), true); DataSplit actual = InstantiationUtil.deserializeObject(v5Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV6() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), 
singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), "hdfs:///path/to/warehouse", null, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, 33L); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withTotalBuckets(32) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v6Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v6"), true); DataSplit actual = InstantiationUtil.deserializeObject(v6Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV7() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), 
Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), "hdfs:///path/to/warehouse", 12L, null); List<DataFileMeta> dataFiles = Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, 33L); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withTotalBuckets(32) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(split)).isEqualTo(split); byte[] v6Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v7"), true); DataSplit actual = InstantiationUtil.deserializeObject(v6Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } @Test public void testSerializerCompatibleV8() throws Exception { SimpleStats keyStats = new SimpleStats( singleColumn("min_key"), singleColumn("max_key"), fromLongArray(new Long[] {0L})); SimpleStats valueStats = new SimpleStats( singleColumn("min_value"), singleColumn("max_value"), fromLongArray(new Long[] {0L})); DataFileMeta dataFile = DataFileMeta.create( "my_file", 1024 * 1024, 1024, singleColumn("min_key"), singleColumn("max_key"), keyStats, valueStats, 15, 200, 5, 3, Arrays.asList("extra1", "extra2"), Timestamp.fromLocalDateTime(LocalDateTime.parse("2022-03-02T20:20:12")), 11L, new byte[] {1, 2, 4}, FileSource.COMPACT, Arrays.asList("field1", "field2", "field3"), "hdfs:///path/to/warehouse", 12L, Arrays.asList("a", "b", "c", "f")); List<DataFileMeta> dataFiles = 
Collections.singletonList(dataFile); DeletionFile deletionFile = new DeletionFile("deletion_file", 100, 22, 33L); List<DeletionFile> deletionFiles = Collections.singletonList(deletionFile); BinaryRow partition = new BinaryRow(1); BinaryRowWriter binaryRowWriter = new BinaryRowWriter(partition); binaryRowWriter.writeString(0, BinaryString.fromString("aaaaa")); binaryRowWriter.complete(); DataSplit split = DataSplit.builder() .withSnapshot(18) .withPartition(partition) .withBucket(20) .withTotalBuckets(32) .withDataFiles(dataFiles) .withDataDeletionFiles(deletionFiles) .withBucketPath("my path") .build(); assertThat(InstantiationUtil.clone(InstantiationUtil.clone(split))).isEqualTo(split); byte[] v6Bytes = IOUtils.readFully( DataSplitCompatibleTest.class .getClassLoader() .getResourceAsStream("compatibility/datasplit-v8"), true); DataSplit actual = InstantiationUtil.deserializeObject(v6Bytes, DataSplit.class.getClassLoader()); assertThat(actual).isEqualTo(split); } private DataFileMeta newDataFile(long rowCount) { return newDataFile(rowCount, null, null); } private DataFileMeta newDataFile( long rowCount, SimpleStats rowStats, @Nullable List<String> valueStatsCols) { return DataFileMeta.forAppend( "my_data_file.parquet", 1024 * 1024, rowCount, rowStats, 0L, rowCount - 1, 1, Collections.emptyList(), null, null, valueStatsCols, null, null, null); } private DataSplit newDataSplit( boolean rawConvertible, List<DataFileMeta> dataFiles, List<DeletionFile> deletionFiles) { DataSplit.Builder builder = DataSplit.builder(); builder.withSnapshot(1) .withPartition(BinaryRow.EMPTY_ROW) .withBucket(1) .withBucketPath("my path") .rawConvertible(rawConvertible) .withDataFiles(dataFiles); if (deletionFiles != null) { builder.withDataDeletionFiles(deletionFiles); } return builder.build(); } private BinaryRow newBinaryRow(Object[] objs) { BinaryRow row = new BinaryRow(objs.length); BinaryRowWriter writer = new BinaryRowWriter(row); writer.reset(); for (int i = 0; i < objs.length; i++) 
{ if (objs[i] instanceof Integer) { writer.writeInt(i, (Integer) objs[i]); } else if (objs[i] instanceof Long) { writer.writeLong(i, (Long) objs[i]); } else if (objs[i] instanceof Float) { writer.writeFloat(i, (Float) objs[i]); } else if (objs[i] instanceof Double) { writer.writeDouble(i, (Double) objs[i]); } else if (objs[i] instanceof Timestamp) { writer.writeTimestamp(i, (Timestamp) objs[i], 5); } else { throw new UnsupportedOperationException("It's not supported."); } } writer.complete(); return row; } }
openjdk/jdk8
35,502
jaxp/src/com/sun/org/apache/xml/internal/dtm/DTM.java
/*
 * reserved comment block
 * DO NOT REMOVE OR ALTER!
 */
/*
 * Copyright 1999-2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * $Id: DTM.java,v 1.2.4.1 2005/09/15 08:14:51 suresh_emailid Exp $
 */
package com.sun.org.apache.xml.internal.dtm;

import javax.xml.transform.SourceLocator;

import com.sun.org.apache.xml.internal.utils.XMLString;

/**
 * <code>DTM</code> is an XML document model expressed as a table
 * rather than an object tree. It attempts to provide an interface to
 * a parse tree that has very little object creation. (DTM
 * implementations may also support incremental construction of the
 * model, but that's hidden from the DTM API.)
 *
 * <p>Nodes in the DTM are identified by integer "handles". A handle must
 * be unique within a process, and carries both node identification and
 * document identification. It must be possible to compare two handles
 * (and thus their nodes) for identity with "==".</p>
 *
 * <p>Namespace URLs, local-names, and expanded-names can all be
 * represented by and tested as integer ID values. An expanded name
 * represents (and may or may not directly contain) a combination of
 * the URL ID, and the local-name ID. Note that the namespace URL id
 * can be 0, which should have the meaning that the namespace is null.
 * For consistency, zero should not be used for a local-name index.</p>
 *
 * <p>Text content of a node is represented by an index and length,
 * permitting efficient storage such as a shared FastStringBuffer.</p>
 *
 * <p>The model of the tree, as well as the general navigation model,
 * is that of XPath 1.0, for the moment. The model will eventually be
 * adapted to match the XPath 2.0 data model, XML Schema, and
 * InfoSet.</p>
 *
 * <p>DTM does _not_ directly support the W3C's Document Object
 * Model. However, it attempts to come close enough that an
 * implementation of DTM can be created that wraps a DOM and vice
 * versa.</p>
 *
 * <p><strong>Please Note:</strong> The DTM API is still
 * <strong>Subject To Change.</strong> This wouldn't affect most
 * users, but might require updating some extensions.</p>
 *
 * <p>The largest change being contemplated is a reconsideration of
 * the Node Handle representation. We are still not entirely sure
 * that an integer packed with two numeric subfields is really the
 * best solution. It has been suggested that we move up to a Long, to
 * permit more nodes per document without having to reduce the number
 * of slots in the DTMManager. There's even been a proposal that we
 * replace these integers with "cursor" objects containing the
 * internal node id and a pointer to the actual DTM object; this might
 * reduce the need to continuously consult the DTMManager to retrieve
 * the latter, and might provide a useful "hook" back into normal Java
 * heap management. But changing this datatype would have huge impact
 * on Xalan's internals -- especially given Java's lack of C-style
 * typedefs -- so we won't cut over unless we're convinced the new
 * solution really would be an improvement!</p>
 */
public interface DTM
{

  /**
   * Null node handles are represented by this value.
   */
  public static final int NULL = -1;

  // These nodeType mnemonics and values are deliberately the same as those
  // used by the DOM, for convenient mapping
  //
  // %REVIEW% Should we actually define these as initialized to,
  // eg. org.w3c.dom.Document.ELEMENT_NODE?

  /**
   * The node is a <code>Root</code>.
   */
  public static final short ROOT_NODE = 0;

  /**
   * The node is an <code>Element</code>.
   */
  public static final short ELEMENT_NODE = 1;

  /**
   * The node is an <code>Attr</code>.
   */
  public static final short ATTRIBUTE_NODE = 2;

  /**
   * The node is a <code>Text</code> node.
   */
  public static final short TEXT_NODE = 3;

  /**
   * The node is a <code>CDATASection</code>.
   */
  public static final short CDATA_SECTION_NODE = 4;

  /**
   * The node is an <code>EntityReference</code>.
   */
  public static final short ENTITY_REFERENCE_NODE = 5;

  /**
   * The node is an <code>Entity</code>.
   */
  public static final short ENTITY_NODE = 6;

  /**
   * The node is a <code>ProcessingInstruction</code>.
   */
  public static final short PROCESSING_INSTRUCTION_NODE = 7;

  /**
   * The node is a <code>Comment</code>.
   */
  public static final short COMMENT_NODE = 8;

  /**
   * The node is a <code>Document</code>.
   */
  public static final short DOCUMENT_NODE = 9;

  /**
   * The node is a <code>DocumentType</code>.
   */
  public static final short DOCUMENT_TYPE_NODE = 10;

  /**
   * The node is a <code>DocumentFragment</code>.
   */
  public static final short DOCUMENT_FRAGMENT_NODE = 11;

  /**
   * The node is a <code>Notation</code>.
   */
  public static final short NOTATION_NODE = 12;

  /**
   * The node is a <code>namespace node</code>. Note that this is not
   * currently a node type defined by the DOM API.
   */
  public static final short NAMESPACE_NODE = 13;

  /**
   * The number of valid nodetypes.
   */
  public static final short NTYPES = 14;

  // ========= DTM Implementation Control Functions. ==============

  // %TBD% RETIRED -- do via setFeature if needed. Remove from impls.
  // public void setParseBlockSize(int blockSizeSuggestion);

  /**
   * Set an implementation dependent feature.
   * <p>
   * %REVIEW% Do we really expect to set features on DTMs?
   *
   * @param featureId A feature URL.
   * @param state true if this feature should be on, false otherwise.
   */
  public void setFeature(String featureId, boolean state);

  /**
   * Set a run time property for this DTM instance.
   *
   * @param property a <code>String</code> value
   * @param value an <code>Object</code> value
   */
  public void setProperty(String property, Object value);

  // ========= Document Navigation Functions =========

  /**
   * This returns a stateless "traverser", that can navigate over an
   * XPath axis, though not in document order.
   *
   * @param axis One of Axes.ANCESTORORSELF, etc.
   *
   * @return A DTMAxisIterator, or null if the given axis isn't supported.
   */
  public DTMAxisTraverser getAxisTraverser(final int axis);

  /**
   * This is a shortcut to the iterators that implement
   * XPath axes.
   * Returns a bare-bones iterator that must be initialized
   * with a start node (using iterator.setStartNode()).
   *
   * @param axis One of Axes.ANCESTORORSELF, etc.
   *
   * @return A DTMAxisIterator, or null if the given axis isn't supported.
   */
  public DTMAxisIterator getAxisIterator(final int axis);

  /**
   * Get an iterator that can navigate over an XPath Axis, predicated by
   * the extended type ID.
   *
   * @param axis the XPath axis to navigate.
   * @param type An extended type ID.
   *
   * @return A DTMAxisIterator, or null if the given axis isn't supported.
   */
  public DTMAxisIterator getTypedAxisIterator(final int axis, final int type);

  /**
   * Given a node handle, test if it has child nodes.
   * <p> %REVIEW% This is obviously useful at the DOM layer, where it
   * would permit testing this without having to create a proxy
   * node. It's less useful in the DTM API, where
   * (dtm.getFirstChild(nodeHandle)!=DTM.NULL) is just as fast and
   * almost as self-evident. But it's a convenience, and eases porting
   * of DOM code to DTM. </p>
   *
   * @param nodeHandle int Handle of the node.
   * @return int true if the given node has child nodes.
   */
  public boolean hasChildNodes(int nodeHandle);

  /**
   * Given a node handle, get the handle of the node's first child.
   *
   * @param nodeHandle int Handle of the node.
   * @return int DTM node-number of first child,
   * or DTM.NULL to indicate none exists.
   */
  public int getFirstChild(int nodeHandle);

  /**
   * Given a node handle, get the handle of the node's last child.
   *
   * @param nodeHandle int Handle of the node.
   * @return int Node-number of last child,
   * or DTM.NULL to indicate none exists.
   */
  public int getLastChild(int nodeHandle);

  /**
   * Retrieves an attribute node by local name and namespace URI
   *
   * %TBD% Note that we currently have no way to support
   * the DOM's old getAttribute() call, which accesses only the qname.
   *
   * @param elementHandle Handle of the node upon which to look up this attribute.
   * @param namespaceURI The namespace URI of the attribute to
   *   retrieve, or null.
   * @param name The local name of the attribute to
   *   retrieve.
   * @return The attribute node handle with the specified name (
   *   <code>nodeName</code>) or <code>DTM.NULL</code> if there is no such
   *   attribute.
   */
  public int getAttributeNode(int elementHandle, String namespaceURI,
                              String name);

  /**
   * Given a node handle, get the index of the node's first attribute.
   *
   * @param nodeHandle int Handle of the node.
   * @return Handle of first attribute, or DTM.NULL to indicate none exists.
   */
  public int getFirstAttribute(int nodeHandle);

  /**
   * Given a node handle, get the index of the node's first namespace node.
   *
   * @param nodeHandle handle to node, which should probably be an element
   *                   node, but need not be.
   *
   * @param inScope true if all namespaces in scope should be
   *                   returned, false if only the node's own
   *                   namespace declarations should be returned.
   * @return handle of first namespace,
   * or DTM.NULL to indicate none exists.
   */
  public int getFirstNamespaceNode(int nodeHandle, boolean inScope);

  /**
   * Given a node handle, advance to its next sibling.
   * @param nodeHandle int Handle of the node.
   * @return int Node-number of next sibling,
   * or DTM.NULL to indicate none exists.
   */
  public int getNextSibling(int nodeHandle);

  /**
   * Given a node handle, find its preceding sibling.
   * WARNING: DTM implementations may be asymmetric; in some,
   * this operation has been resolved by search, and is relatively expensive.
   *
   * @param nodeHandle the id of the node.
   * @return int Node-number of the previous sib,
   * or DTM.NULL to indicate none exists.
   */
  public int getPreviousSibling(int nodeHandle);

  /**
   * Given a node handle, advance to the next attribute. If an
   * element, we advance to its first attribute; if an attr, we advance to
   * the next attr of the same element.
   *
   * @param nodeHandle int Handle of the node.
   * @return int DTM node-number of the resolved attr,
   * or DTM.NULL to indicate none exists.
   */
  public int getNextAttribute(int nodeHandle);

  /**
   * Given a namespace handle, advance to the next namespace in the same scope
   * (local or local-plus-inherited, as selected by getFirstNamespaceNode)
   *
   * @param baseHandle handle to original node from where the first child
   * was relative to (needed to return nodes in document order).
   * @param namespaceHandle handle to node which must be of type
   * NAMESPACE_NODE.
   * @param inScope true if all namespaces in scope should be returned,
   * false if only the node's own namespace declarations should be returned.
   * @return handle of next namespace,
   * or DTM.NULL to indicate none exists.
   */
  public int getNextNamespaceNode(int baseHandle, int namespaceHandle,
                                  boolean inScope);

  /**
   * Given a node handle, find its parent node.
   *
   * @param nodeHandle the id of the node.
   * @return int Node handle of parent,
   * or DTM.NULL to indicate none exists.
   */
  public int getParent(int nodeHandle);

  /**
   * Given a DTM which contains only a single document,
   * find the Node Handle of the  Document node. Note
   * that if the DTM is configured so it can contain multiple
   * documents, this call will return the Document currently
   * under construction -- but may return null if it's between
   * documents. Generally, you should use getOwnerDocument(nodeHandle)
   * or getDocumentRoot(nodeHandle) instead.
   *
   * @return int Node handle of document, or DTM.NULL if a shared DTM
   * can not tell us which Document is currently active.
   */
  public int getDocument();

  /**
   * Given a node handle, find the owning document node. This version mimics
   * the behavior of the DOM call by the same name.
   *
   * @param nodeHandle the id of the node.
   * @return int Node handle of owning document, or DTM.NULL if the node was
   * a Document.
   * @see #getDocumentRoot(int nodeHandle)
   */
  public int getOwnerDocument(int nodeHandle);

  /**
   * Given a node handle, find the owning document node.
   *
   * @param nodeHandle the id of the node.
   * @return int Node handle of owning document, or the node itself if it was
   * a Document. (Note difference from DOM, where getOwnerDocument returns
   * null for the Document node.)
   * @see #getOwnerDocument(int nodeHandle)
   */
  public int getDocumentRoot(int nodeHandle);

  /**
   * Get the string-value of a node as a String object
   * (see http://www.w3.org/TR/xpath#data-model
   * for the definition of a node's string-value).
   *
   * @param nodeHandle The node ID.
   *
   * @return A string object that represents the string-value of the given node.
   */
  public XMLString getStringValue(int nodeHandle);

  /**
   * Get number of character array chunks in
   * the string-value of a node.
   * (see http://www.w3.org/TR/xpath#data-model
   * for the definition of a node's string-value).
   * Note that a single text node may have multiple text chunks.
   *
   * @param nodeHandle The node ID.
   *
   * @return number of character array chunks in
   *         the string-value of a node.
   */
  public int getStringValueChunkCount(int nodeHandle);

  /**
   * Get a character array chunk in the string-value of a node.
   * (see http://www.w3.org/TR/xpath#data-model
   * for the definition of a node's string-value).
   * Note that a single text node may have multiple text chunks.
   *
   * @param nodeHandle The node ID.
   * @param chunkIndex Which chunk to get.
   * @param startAndLen A two-integer array which, upon return, WILL
   * BE FILLED with values representing the chunk's start position
   * within the returned character buffer and the length of the chunk.
   * @return The character array buffer within which the chunk occurs,
   * setting startAndLen's contents as a side-effect.
   */
  public char[] getStringValueChunk(int nodeHandle, int chunkIndex,
                                    int[] startAndLen);

  /**
   * Given a node handle, return an ID that represents the node's expanded name.
   *
   * @param nodeHandle The handle to the node in question.
   *
   * @return the expanded-name id of the node.
   */
  public int getExpandedTypeID(int nodeHandle);

  /**
   * Given an expanded name, return an ID.  If the expanded-name does not
   * exist in the internal tables, the entry will be created, and the ID will
   * be returned.  Any additional nodes that are created that have this
   * expanded name will use this ID.
   *
   * @param namespace the namespace URI part of the expanded name.
   * @param localName the local-name part of the expanded name.
   * @param type the node type of the expanded name.
   *
   * @return the expanded-name id of the node.
   */
  public int getExpandedTypeID(String namespace, String localName, int type);

  /**
   * Given an expanded-name ID, return the local name part.
   *
   * @param ExpandedNameID an ID that represents an expanded-name.
   * @return String Local name of this node.
   */
  public String getLocalNameFromExpandedNameID(int ExpandedNameID);

  /**
   * Given an expanded-name ID, return the namespace URI part.
   *
   * @param ExpandedNameID an ID that represents an expanded-name.
   * @return String URI value of this node's namespace, or null if no
   * namespace was resolved.
   */
  public String getNamespaceFromExpandedNameID(int ExpandedNameID);

  /**
   * Given a node handle, return its DOM-style node name. This will
   * include names such as #text or #document.
   *
   * @param nodeHandle the id of the node.
   * @return String Name of this node, which may be an empty string.
   * %REVIEW% Document when empty string is possible...
   */
  public String getNodeName(int nodeHandle);

  /**
   * Given a node handle, return the XPath node name.  This should be
   * the name as described by the XPath data model, NOT the DOM-style
   * name.
   *
   * @param nodeHandle the id of the node.
   * @return String Name of this node.
   */
  public String getNodeNameX(int nodeHandle);

  /**
   * Given a node handle, return its DOM-style localname.
   * (As defined in Namespaces, this is the portion of the name after the
   * prefix, if present, or the whole node name if no prefix exists)
   *
   * @param nodeHandle the id of the node.
   * @return String Local name of this node.
   */
  public String getLocalName(int nodeHandle);

  /**
   * Given a namespace handle, return the prefix that the namespace decl is
   * mapping.
   * Given a node handle, return the prefix used to map to the namespace.
   * (As defined in Namespaces, this is the portion of the name before any
   * colon character).
   *
   * <p> %REVIEW% Are you sure you want "" for no prefix?  </p>
   *
   * @param nodeHandle the id of the node.
   * @return String prefix of this node's name, or "" if no explicit
   * namespace prefix was given.
   */
  public String getPrefix(int nodeHandle);

  /**
   * Given a node handle, return its DOM-style namespace URI
   * (As defined in Namespaces, this is the declared URI which this node's
   * prefix -- or default in lieu thereof -- was mapped to.)
   * @param nodeHandle the id of the node.
   * @return String URI value of this node's namespace, or null if no
   * namespace was resolved.
   */
  public String getNamespaceURI(int nodeHandle);

  /**
   * Given a node handle, return its node value. This is mostly
   * as defined by the DOM, but may ignore some conveniences.
   * <p>
   * @param nodeHandle The node id.
   * @return String Value of this node, or null if not
   * meaningful for this node type.
   */
  public String getNodeValue(int nodeHandle);

  /**
   * Given a node handle, return its DOM-style node type.
   *
   * <p>%REVIEW% Generally, returning short is false economy. Return int?</p>
   *
   * @param nodeHandle The node id.
   * @return int Node type, as per the DOM's Node._NODE constants.
   */
  public short getNodeType(int nodeHandle);

  /**
   * Get the depth level of this node in the tree (equals 1 for
   * a parentless node).
   *
   * @param nodeHandle The node id.
   * @return the number of ancestors, plus one
   * @xsl.usage internal
   */
  public short getLevel(int nodeHandle);

  // ============== Document query functions ==============

  /**
   * Tests whether DTM DOM implementation implements a specific feature and
   * that feature is supported by this node.
   * @param feature The name of the feature to test.
   * @param version This is the version number of the feature to test.
   *   If the version is not
   *   specified, supporting any version of the feature will cause the
   *   method to return <code>true</code>.
   * @return Returns <code>true</code> if the specified feature is
   *   supported on this node, <code>false</code> otherwise.
   */
  public boolean isSupported(String feature, String version);

  /**
   * Return the base URI of the document entity. If it is not known
   * (because the document was parsed from a socket connection or from
   * standard input, for example), the value of this property is unknown.
   *
   * @return the document base URI String object or null if unknown.
   */
  public String getDocumentBaseURI();

  /**
   * Set the base URI of the document entity.
   *
   * @param baseURI the document base URI String object or null if unknown.
   */
  public void setDocumentBaseURI(String baseURI);

  /**
   * Return the system identifier of the document entity. If
   * it is not known, the value of this property is null.
   *
   * @param nodeHandle The node id, which can be any valid node handle.
   * @return the system identifier String object or null if unknown.
   */
  public String getDocumentSystemIdentifier(int nodeHandle);

  /**
   * Return the name of the character encoding scheme
   * in which the document entity is expressed.
   *
   * @param nodeHandle The node id, which can be any valid node handle.
   * @return the document encoding String object.
   */
  public String getDocumentEncoding(int nodeHandle);

  /**
   * Return an indication of the standalone status of the document,
   * either "yes" or "no". This property is derived from the optional
   * standalone document declaration in the XML declaration at the
   * beginning of the document entity, and has no value if there is no
   * standalone document declaration.
   *
   * @param nodeHandle The node id, which can be any valid node handle.
   * @return the document standalone String object, either "yes", "no", or null.
   */
  public String getDocumentStandalone(int nodeHandle);

  /**
   * Return a string representing the XML version of the document. This
   * property is derived from the XML declaration optionally present at the
   * beginning of the document entity, and has no value if there is no XML
   * declaration.
   *
   * @param documentHandle the document handle
   * @return the document version String object
   */
  public String getDocumentVersion(int documentHandle);

  /**
   * Return an indication of
   * whether the processor has read the complete DTD. Its value is a
   * boolean. If it is false, then certain properties (indicated in their
   * descriptions below) may be unknown. If it is true, those properties
   * are never unknown.
   *
   * @return <code>true</code> if all declarations were processed;
   *         <code>false</code> otherwise.
   */
  public boolean getDocumentAllDeclarationsProcessed();

  /**
   * A document type declaration information item has the following properties:
   *
   * 1. [system identifier] The system identifier of the external subset, if
   * it exists. Otherwise this property has no value.
   *
   * @return the system identifier String object, or null if there is none.
   */
  public String getDocumentTypeDeclarationSystemIdentifier();

  /**
   * Return the public identifier of the external subset,
   * normalized as described in 4.2.2 External Entities [XML]. If there is
   * no external subset or if it has no public identifier, this property
   * has no value.
   *
   * @return the public identifier String object, or null if there is none.
   */
  public String getDocumentTypeDeclarationPublicIdentifier();

  /**
   * Returns the <code>Element</code> whose <code>ID</code> is given by
   * <code>elementId</code>. If no such element exists, returns
   * <code>DTM.NULL</code>. Behavior is not defined if more than one element
   * has this <code>ID</code>. Attributes (including those
   * with the name "ID") are not of type ID unless so defined by DTD/Schema
   * information available to the DTM implementation.
   * Implementations that do not know whether attributes are of type ID or
   * not are expected to return <code>DTM.NULL</code>.
   *
   * <p>%REVIEW% Presumably IDs are still scoped to a single document,
   * and this operation searches only within a single document, right?
   * Wouldn't want collisions between DTMs in the same process.</p>
   *
   * @param elementId The unique <code>id</code> value for an element.
   * @return The handle of the matching element.
   */
  public int getElementById(String elementId);

  /**
   * The getUnparsedEntityURI function returns the URI of the unparsed
   * entity with the specified name in the same document as the context
   * node (see [3.3 Unparsed Entities]). It returns the empty string if
   * there is no such entity.
   * <p>
   * XML processors may choose to use the System Identifier (if one
   * is provided) to resolve the entity, rather than the URI in the
   * Public Identifier. The details are dependent on the processor, and
   * we would have to support some form of plug-in resolver to handle
   * this properly. Currently, we simply return the System Identifier if
   * present, and hope that it is a usable URI or that our caller can
   * map it to one.
   * %REVIEW% Resolve Public Identifiers... or consider changing function name.
   * <p>
   * If we find a relative URI
   * reference, XML expects it to be resolved in terms of the base URI
   * of the document. The DOM doesn't do that for us, and it isn't
   * entirely clear whether that should be done here; currently that's
   * pushed up to a higher level of our application. (Note that DOM Level
   * 1 didn't store the document's base URI.)
   * %REVIEW% Consider resolving Relative URIs.
   * <p>
   * (The DOM's statement that "An XML processor may choose to
   * completely expand entities before the structure model is passed
   * to the DOM" refers only to parsed entities, not unparsed, and hence
   * doesn't affect this function.)
   *
   * @param name A string containing the Entity Name of the unparsed
   * entity.
   *
   * @return String containing the URI of the Unparsed Entity, or an
   * empty string if no such entity exists.
   */
  public String getUnparsedEntityURI(String name);

  // ============== Boolean methods ================

  /**
   * Return true if the xsl:strip-space or xsl:preserve-space was processed
   * during construction of the document contained in this DTM.
   *
   * @return true if space-stripping information was processed at build time.
   */
  public boolean supportsPreStripping();

  /**
   * Figure out whether nodeHandle2 should be considered as being later
   * in the document than nodeHandle1, in Document Order as defined
   * by the XPath model. This may not agree with the ordering defined
   * by other XML applications.
   * <p>
   * There are some cases where ordering isn't defined, and neither are
   * the results of this function -- though we'll generally return true.
   * <p>
   * %REVIEW% Make sure this does the right thing with attribute nodes!!!
   * <p>
   * %REVIEW% Consider renaming for clarity. Perhaps isDocumentOrder(a,b)?
   *
   * @param firstNodeHandle DOM Node to perform position comparison on.
   * @param secondNodeHandle DOM Node to perform position comparison on.
   *
   * @return false if secondNode comes before firstNode, otherwise return true.
   * You can think of this as
   * <code>(firstNode.documentOrderPosition &lt;= secondNode.documentOrderPosition)</code>.
   */
  public boolean isNodeAfter(int firstNodeHandle, int secondNodeHandle);

  /**
   * 2. [element content whitespace] A boolean indicating whether a
   * text node represents white space appearing within element content
   * (see [XML], 2.10 "White Space Handling"). Note that validating
   * XML processors are required by XML 1.0 to provide this
   * information... but that DOM Level 2 did not support it, since it
   * depends on knowledge of the DTD which DOM2 could not guarantee
   * would be available.
   * <p>
   * If there is no declaration for the containing element, an XML
   * processor must assume that the whitespace could be meaningful and
   * return false. If no declaration has been read, but the [all
   * declarations processed] property of the document information item
   * is false (so there may be an unread declaration), then the value
   * of this property is indeterminate for white space characters and
   * should probably be reported as false. It is always false for text
   * nodes that contain anything other than (or in addition to) white
   * space.
   * <p>
   * Note too that it always returns false for non-Text nodes.
   * <p>
   * %REVIEW% Joe wants to rename this isWhitespaceInElementContent() for clarity
   *
   * @param nodeHandle the node ID.
   * @return <code>true</code> if the node definitely represents whitespace in
   * element content; <code>false</code> otherwise.
   */
  public boolean isCharacterElementContentWhitespace(int nodeHandle);

  /**
   * 10. [all declarations processed] This property is not strictly speaking
   * part of the infoset of the document. Rather it is an indication of
   * whether the processor has read the complete DTD. Its value is a
   * boolean. If it is false, then certain properties (indicated in their
   * descriptions below) may be unknown. If it is true, those properties
   * are never unknown.
   *
   * @param documentHandle A node handle that must identify a document.
   * @return <code>true</code> if all declarations were processed;
   *         <code>false</code> otherwise.
   */
  public boolean isDocumentAllDeclarationsProcessed(int documentHandle);

  /**
   * 5. [specified] A flag indicating whether this attribute was actually
   * specified in the start-tag of its element, or was defaulted from the
   * DTD (or schema).
   *
   * @param attributeHandle The attribute handle
   * @return <code>true</code> if the attribute was specified;
   *         <code>false</code> if it was defaulted or the handle doesn't
   *            refer to an attribute node.
   */
  public boolean isAttributeSpecified(int attributeHandle);

  // ========== Direct SAX Dispatch, for optimization purposes ========

  /**
   * Directly call the
   * characters method on the passed ContentHandler for the
   * string-value of the given node (see http://www.w3.org/TR/xpath#data-model
   * for the definition of a node's string-value). Multiple calls to the
   * ContentHandler's characters methods may well occur for a single call to
   * this method.
   *
   * @param nodeHandle The node ID.
   * @param ch A non-null reference to a ContentHandler.
   * @param normalize true if the content should be normalized according to
   * the rules for the XPath
   * <a href="http://www.w3.org/TR/xpath#function-normalize-space">normalize-space</a>
   * function.
   *
   * @throws org.xml.sax.SAXException
   */
  public void dispatchCharactersEvents(
    int nodeHandle, org.xml.sax.ContentHandler ch, boolean normalize)
      throws org.xml.sax.SAXException;

  /**
   * Directly create SAX parser events representing the XML content of
   * a DTM subtree. This is a "serialize" operation.
   *
   * @param nodeHandle The node ID.
   * @param ch A non-null reference to a ContentHandler.
   *
   * @throws org.xml.sax.SAXException
   */
  public void dispatchToEvents(int nodeHandle, org.xml.sax.ContentHandler ch)
    throws org.xml.sax.SAXException;

  /**
   * Return an DOM node for the given node.
   *
   * @param nodeHandle The node ID.
   *
   * @return A node representation of the DTM node.
   */
  public org.w3c.dom.Node getNode(int nodeHandle);

  // ==== Construction methods (may not be supported by some implementations!) =====
  // %REVIEW% What response occurs if not supported?

  /**
   * @return true iff we're building this model incrementally (eg
   * we're partnered with a CoroutineParser) and thus require that the
   * transformation and the parse run simultaneously. Guidance to the
   * DTMManager.
   */
  public boolean needsTwoThreads();

  // %REVIEW% Do these appends make any sense, should we support a
  // wider set of methods (like the "append" methods in the
  // current DTMDocumentImpl draft), or should we just support SAX
  // listener interfaces?  Should it be a separate interface to
  // make that distinction explicit?

  /**
   * Return this DTM's content handler, if it has one.
   *
   * @return null if this model doesn't respond to SAX events.
   */
  public org.xml.sax.ContentHandler getContentHandler();

  /**
   * Return this DTM's lexical handler, if it has one.
   *
   * %REVIEW% Should this return null if construction already done/begun?
   *
   * @return null if this model doesn't respond to lexical SAX events.
   */
  public org.xml.sax.ext.LexicalHandler getLexicalHandler();

  /**
   * Return this DTM's EntityResolver, if it has one.
   *
   * @return null if this model doesn't respond to SAX entity ref events.
   */
  public org.xml.sax.EntityResolver getEntityResolver();

  /**
   * Return this DTM's DTDHandler, if it has one.
   *
   * @return null if this model doesn't respond to SAX dtd events.
   */
  public org.xml.sax.DTDHandler getDTDHandler();

  /**
   * Return this DTM's ErrorHandler, if it has one.
   *
   * @return null if this model doesn't respond to SAX error events.
   */
  public org.xml.sax.ErrorHandler getErrorHandler();

  /**
   * Return this DTM's DeclHandler, if it has one.
   *
   * @return null if this model doesn't respond to SAX Decl events.
   */
  public org.xml.sax.ext.DeclHandler getDeclHandler();

  /**
   * Append a child to "the end of the document". Please note that
   * the node is always cloned in a base DTM, since our basic behavior
   * is immutable so nodes can't be removed from their previous
   * location.
   *
   * <p> %REVIEW% DTM maintains an insertion cursor which
   * performs a depth-first tree walk as nodes come in, and this operation
   * is really equivalent to:
   *    insertionCursor.appendChild(document.importNode(newChild)))
   * where the insert point is the last element that was appended (or
   * the last one popped back to by an end-element operation).</p>
   *
   * @param newChild Must be a valid new node handle.
   * @param clone true if the child should be cloned into the document.
   * @param cloneDepth if the clone argument is true, specifies that the
   *                   clone should include all it's children.
   */
  public void appendChild(int newChild, boolean clone, boolean cloneDepth);

  /**
   * Append a text node child that will be constructed from a string,
   * to the end of the document. Behavior is otherwise like appendChild().
   *
   * @param str Non-null reference to a string.
   */
  public void appendTextChild(String str);

  /**
   * Get the location of a node in the source document.
   *
   * @param node an <code>int</code> value
   * @return a <code>SourceLocator</code> value or null if no location
   * is available
   */
  public SourceLocator getSourceLocatorFor(int node);

  /**
   * As the DTM is registered with the DTMManager, this method
   * will be called. This will give the DTM implementation a
   * chance to initialize any subsystems that are required to
   * build the DTM
   */
  public void documentRegistration();

  /**
   * As documents are released from the DTMManager, the DTM implementation
   * will be notified of the event. This will allow the DTM implementation
   * to shutdown any subsystem activity that may have been associated with
   * the active DTM Implementation.
   */
  public void documentRelease();

  /**
   * Migrate a DTM built with an old DTMManager to a new DTMManager.
   * After the migration, the new DTMManager will treat the DTM as
   * one that is built by itself.
   * This is used to support DTM sharing between multiple transformations.
   * @param manager the DTMManager
   */
  public void migrateTo(DTMManager manager);
}
googleapis/google-cloud-java
35,324
java-video-intelligence/proto-google-cloud-video-intelligence-v1p2beta1/src/main/java/com/google/cloud/videointelligence/v1p2beta1/ObjectTrackingFrame.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1p2beta1; /** * * * <pre> * Video frame level annotations for object detection and tracking. This field * stores per frame location, time offset, and confidence. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame} */ public final class ObjectTrackingFrame extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) ObjectTrackingFrameOrBuilder { private static final long serialVersionUID = 0L; // Use ObjectTrackingFrame.newBuilder() to construct. 
private ObjectTrackingFrame(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ObjectTrackingFrame() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ObjectTrackingFrame(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.class, com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.Builder.class); } private int bitField0_; public static final int NORMALIZED_BOUNDING_BOX_FIELD_NUMBER = 1; private com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalizedBoundingBox_; /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return Whether the normalizedBoundingBox field is set. */ @java.lang.Override public boolean hasNormalizedBoundingBox() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return The normalizedBoundingBox. 
*/ @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox getNormalizedBoundingBox() { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.getDefaultInstance() : normalizedBoundingBox_; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBoxOrBuilder getNormalizedBoundingBoxOrBuilder() { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.getDefaultInstance() : normalizedBoundingBox_; } public static final int TIME_OFFSET_FIELD_NUMBER = 2; private com.google.protobuf.Duration timeOffset_; /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return Whether the timeOffset field is set. */ @java.lang.Override public boolean hasTimeOffset() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return The timeOffset. */ @java.lang.Override public com.google.protobuf.Duration getTimeOffset() { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() { return timeOffset_ == null ? 
com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNormalizedBoundingBox()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getTimeOffset()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNormalizedBoundingBox()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTimeOffset()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame)) { return super.equals(obj); } com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame other = (com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) obj; if (hasNormalizedBoundingBox() != other.hasNormalizedBoundingBox()) return false; if (hasNormalizedBoundingBox()) { if (!getNormalizedBoundingBox().equals(other.getNormalizedBoundingBox())) return false; } if (hasTimeOffset() != other.hasTimeOffset()) return false; if (hasTimeOffset()) { if (!getTimeOffset().equals(other.getTimeOffset())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNormalizedBoundingBox()) { hash = (37 * hash) + NORMALIZED_BOUNDING_BOX_FIELD_NUMBER; hash = (53 * hash) + getNormalizedBoundingBox().hashCode(); } if (hasTimeOffset()) { hash = (37 * hash) + TIME_OFFSET_FIELD_NUMBER; hash = (53 * hash) + getTimeOffset().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Video frame level annotations for object detection and tracking. This field * stores per frame location, time offset, and confidence. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.class, com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.Builder.class); } // Construct using com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNormalizedBoundingBoxFieldBuilder(); getTimeOffsetFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; normalizedBoundingBox_ = null; if (normalizedBoundingBoxBuilder_ != null) { normalizedBoundingBoxBuilder_.dispose(); normalizedBoundingBoxBuilder_ = null; } timeOffset_ = null; if (timeOffsetBuilder_ != null) { timeOffsetBuilder_.dispose(); timeOffsetBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame build() { com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame buildPartial() { com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame result = new com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.normalizedBoundingBox_ = normalizedBoundingBoxBuilder_ == null ? 
normalizedBoundingBox_ : normalizedBoundingBoxBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.timeOffset_ = timeOffsetBuilder_ == null ? timeOffset_ : timeOffsetBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) { return mergeFrom((com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame other) { if (other == com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame.getDefaultInstance()) return this; if (other.hasNormalizedBoundingBox()) { mergeNormalizedBoundingBox(other.getNormalizedBoundingBox()); } if (other.hasTimeOffset()) { mergeTimeOffset(other.getTimeOffset()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public 
final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNormalizedBoundingBoxFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getTimeOffsetFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalizedBoundingBox_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBoxOrBuilder> normalizedBoundingBoxBuilder_; /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return Whether the normalizedBoundingBox field is set. */ public boolean hasNormalizedBoundingBox() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return The normalizedBoundingBox. */ public com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox getNormalizedBoundingBox() { if (normalizedBoundingBoxBuilder_ == null) { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox .getDefaultInstance() : normalizedBoundingBox_; } else { return normalizedBoundingBoxBuilder_.getMessage(); } } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder setNormalizedBoundingBox( com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox value) { if (normalizedBoundingBoxBuilder_ == null) { if (value == null) { throw new NullPointerException(); } normalizedBoundingBox_ = value; } else { normalizedBoundingBoxBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder setNormalizedBoundingBox( com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.Builder builderForValue) { if (normalizedBoundingBoxBuilder_ == null) { normalizedBoundingBox_ = builderForValue.build(); } else { normalizedBoundingBoxBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder mergeNormalizedBoundingBox( com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox value) { if (normalizedBoundingBoxBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && normalizedBoundingBox_ != null && normalizedBoundingBox_ != com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox .getDefaultInstance()) { getNormalizedBoundingBoxBuilder().mergeFrom(value); } else { normalizedBoundingBox_ = value; } } else { normalizedBoundingBoxBuilder_.mergeFrom(value); } if (normalizedBoundingBox_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder clearNormalizedBoundingBox() { bitField0_ = (bitField0_ & ~0x00000001); normalizedBoundingBox_ = null; if (normalizedBoundingBoxBuilder_ != null) { normalizedBoundingBoxBuilder_.dispose(); normalizedBoundingBoxBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.Builder getNormalizedBoundingBoxBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNormalizedBoundingBoxFieldBuilder().getBuilder(); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBoxOrBuilder getNormalizedBoundingBoxOrBuilder() { if (normalizedBoundingBoxBuilder_ != null) { return normalizedBoundingBoxBuilder_.getMessageOrBuilder(); } else { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox .getDefaultInstance() : normalizedBoundingBox_; } } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBoxOrBuilder> getNormalizedBoundingBoxFieldBuilder() { if (normalizedBoundingBoxBuilder_ == null) { normalizedBoundingBoxBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p2beta1.NormalizedBoundingBoxOrBuilder>( getNormalizedBoundingBox(), getParentForChildren(), isClean()); normalizedBoundingBox_ = null; } return normalizedBoundingBoxBuilder_; } private com.google.protobuf.Duration timeOffset_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> timeOffsetBuilder_; /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return Whether the timeOffset field is set. 
*/ public boolean hasTimeOffset() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return The timeOffset. */ public com.google.protobuf.Duration getTimeOffset() { if (timeOffsetBuilder_ == null) { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } else { return timeOffsetBuilder_.getMessage(); } } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder setTimeOffset(com.google.protobuf.Duration value) { if (timeOffsetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeOffset_ = value; } else { timeOffsetBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder setTimeOffset(com.google.protobuf.Duration.Builder builderForValue) { if (timeOffsetBuilder_ == null) { timeOffset_ = builderForValue.build(); } else { timeOffsetBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder mergeTimeOffset(com.google.protobuf.Duration value) { if (timeOffsetBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && timeOffset_ != null && timeOffset_ != com.google.protobuf.Duration.getDefaultInstance()) { getTimeOffsetBuilder().mergeFrom(value); } else { timeOffset_ = value; } } else { timeOffsetBuilder_.mergeFrom(value); } if (timeOffset_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The timestamp of the frame in microseconds. 
* </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder clearTimeOffset() { bitField0_ = (bitField0_ & ~0x00000002); timeOffset_ = null; if (timeOffsetBuilder_ != null) { timeOffsetBuilder_.dispose(); timeOffsetBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public com.google.protobuf.Duration.Builder getTimeOffsetBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTimeOffsetFieldBuilder().getBuilder(); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() { if (timeOffsetBuilder_ != null) { return timeOffsetBuilder_.getMessageOrBuilder(); } else { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } } /** * * * <pre> * The timestamp of the frame in microseconds. 
* </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getTimeOffsetFieldBuilder() { if (timeOffsetBuilder_ == null) { timeOffsetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getTimeOffset(), getParentForChildren(), isClean()); timeOffset_ = null; } return timeOffsetBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame) private static final com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame(); } public static com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ObjectTrackingFrame> PARSER = new com.google.protobuf.AbstractParser<ObjectTrackingFrame>() { @java.lang.Override public ObjectTrackingFrame parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ObjectTrackingFrame> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ObjectTrackingFrame> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1p2beta1.ObjectTrackingFrame getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,324
java-video-intelligence/proto-google-cloud-video-intelligence-v1p3beta1/src/main/java/com/google/cloud/videointelligence/v1p3beta1/ObjectTrackingFrame.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.videointelligence.v1p3beta1; /** * * * <pre> * Video frame level annotations for object detection and tracking. This field * stores per frame location, time offset, and confidence. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame} */ public final class ObjectTrackingFrame extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) ObjectTrackingFrameOrBuilder { private static final long serialVersionUID = 0L; // Use ObjectTrackingFrame.newBuilder() to construct. 
private ObjectTrackingFrame(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ObjectTrackingFrame() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ObjectTrackingFrame(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_ObjectTrackingFrame_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.class, com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.Builder.class); } private int bitField0_; public static final int NORMALIZED_BOUNDING_BOX_FIELD_NUMBER = 1; private com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalizedBoundingBox_; /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return Whether the normalizedBoundingBox field is set. */ @java.lang.Override public boolean hasNormalizedBoundingBox() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return The normalizedBoundingBox. 
*/ @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox getNormalizedBoundingBox() { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.getDefaultInstance() : normalizedBoundingBox_; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBoxOrBuilder getNormalizedBoundingBoxOrBuilder() { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.getDefaultInstance() : normalizedBoundingBox_; } public static final int TIME_OFFSET_FIELD_NUMBER = 2; private com.google.protobuf.Duration timeOffset_; /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return Whether the timeOffset field is set. */ @java.lang.Override public boolean hasTimeOffset() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return The timeOffset. */ @java.lang.Override public com.google.protobuf.Duration getTimeOffset() { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ @java.lang.Override public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() { return timeOffset_ == null ? 
com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNormalizedBoundingBox()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getTimeOffset()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNormalizedBoundingBox()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTimeOffset()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame)) { return super.equals(obj); } com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame other = (com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) obj; if (hasNormalizedBoundingBox() != other.hasNormalizedBoundingBox()) return false; if (hasNormalizedBoundingBox()) { if (!getNormalizedBoundingBox().equals(other.getNormalizedBoundingBox())) return false; } if (hasTimeOffset() != other.hasTimeOffset()) return false; if (hasTimeOffset()) { if (!getTimeOffset().equals(other.getTimeOffset())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if 
(memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasNormalizedBoundingBox()) { hash = (37 * hash) + NORMALIZED_BOUNDING_BOX_FIELD_NUMBER; hash = (53 * hash) + getNormalizedBoundingBox().hashCode(); } if (hasTimeOffset()) { hash = (37 * hash) + TIME_OFFSET_FIELD_NUMBER; hash = (53 * hash) + getTimeOffset().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Video frame level annotations for object detection and tracking. This field * stores per frame location, time offset, and confidence. * </pre> * * Protobuf type {@code google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrameOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_ObjectTrackingFrame_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.class, com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.Builder.class); } // Construct using com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNormalizedBoundingBoxFieldBuilder(); getTimeOffsetFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; normalizedBoundingBox_ = null; if (normalizedBoundingBoxBuilder_ != null) { normalizedBoundingBoxBuilder_.dispose(); normalizedBoundingBoxBuilder_ = null; } timeOffset_ = null; if (timeOffsetBuilder_ != null) { timeOffsetBuilder_.dispose(); timeOffsetBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto .internal_static_google_cloud_videointelligence_v1p3beta1_ObjectTrackingFrame_descriptor; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame getDefaultInstanceForType() { return com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.getDefaultInstance(); } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame build() { com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame buildPartial() { com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame result = new com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.normalizedBoundingBox_ = normalizedBoundingBoxBuilder_ == null ? 
normalizedBoundingBox_ : normalizedBoundingBoxBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.timeOffset_ = timeOffsetBuilder_ == null ? timeOffset_ : timeOffsetBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) { return mergeFrom((com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame other) { if (other == com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame.getDefaultInstance()) return this; if (other.hasNormalizedBoundingBox()) { mergeNormalizedBoundingBox(other.getNormalizedBoundingBox()); } if (other.hasTimeOffset()) { mergeTimeOffset(other.getTimeOffset()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public 
final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getNormalizedBoundingBoxFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getTimeOffsetFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalizedBoundingBox_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBoxOrBuilder> normalizedBoundingBoxBuilder_; /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return Whether the normalizedBoundingBox field is set. */ public boolean hasNormalizedBoundingBox() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> * * @return The normalizedBoundingBox. */ public com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox getNormalizedBoundingBox() { if (normalizedBoundingBoxBuilder_ == null) { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox .getDefaultInstance() : normalizedBoundingBox_; } else { return normalizedBoundingBoxBuilder_.getMessage(); } } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder setNormalizedBoundingBox( com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox value) { if (normalizedBoundingBoxBuilder_ == null) { if (value == null) { throw new NullPointerException(); } normalizedBoundingBox_ = value; } else { normalizedBoundingBoxBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder setNormalizedBoundingBox( com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.Builder builderForValue) { if (normalizedBoundingBoxBuilder_ == null) { normalizedBoundingBox_ = builderForValue.build(); } else { normalizedBoundingBoxBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder mergeNormalizedBoundingBox( com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox value) { if (normalizedBoundingBoxBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && normalizedBoundingBox_ != null && normalizedBoundingBox_ != com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox .getDefaultInstance()) { getNormalizedBoundingBoxBuilder().mergeFrom(value); } else { normalizedBoundingBox_ = value; } } else { normalizedBoundingBoxBuilder_.mergeFrom(value); } if (normalizedBoundingBox_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public Builder clearNormalizedBoundingBox() { bitField0_ = (bitField0_ & ~0x00000001); normalizedBoundingBox_ = null; if (normalizedBoundingBoxBuilder_ != null) { normalizedBoundingBoxBuilder_.dispose(); normalizedBoundingBoxBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.Builder getNormalizedBoundingBoxBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNormalizedBoundingBoxFieldBuilder().getBuilder(); } /** * * * <pre> * The normalized bounding box location of this object track for the frame. 
* </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ public com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBoxOrBuilder getNormalizedBoundingBoxOrBuilder() { if (normalizedBoundingBoxBuilder_ != null) { return normalizedBoundingBoxBuilder_.getMessageOrBuilder(); } else { return normalizedBoundingBox_ == null ? com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox .getDefaultInstance() : normalizedBoundingBox_; } } /** * * * <pre> * The normalized bounding box location of this object track for the frame. * </pre> * * <code> * .google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox normalized_bounding_box = 1; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBoxOrBuilder> getNormalizedBoundingBoxFieldBuilder() { if (normalizedBoundingBoxBuilder_ == null) { normalizedBoundingBoxBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox.Builder, com.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBoxOrBuilder>( getNormalizedBoundingBox(), getParentForChildren(), isClean()); normalizedBoundingBox_ = null; } return normalizedBoundingBoxBuilder_; } private com.google.protobuf.Duration timeOffset_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> timeOffsetBuilder_; /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return Whether the timeOffset field is set. 
*/ public boolean hasTimeOffset() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> * * @return The timeOffset. */ public com.google.protobuf.Duration getTimeOffset() { if (timeOffsetBuilder_ == null) { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } else { return timeOffsetBuilder_.getMessage(); } } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder setTimeOffset(com.google.protobuf.Duration value) { if (timeOffsetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } timeOffset_ = value; } else { timeOffsetBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder setTimeOffset(com.google.protobuf.Duration.Builder builderForValue) { if (timeOffsetBuilder_ == null) { timeOffset_ = builderForValue.build(); } else { timeOffsetBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder mergeTimeOffset(com.google.protobuf.Duration value) { if (timeOffsetBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && timeOffset_ != null && timeOffset_ != com.google.protobuf.Duration.getDefaultInstance()) { getTimeOffsetBuilder().mergeFrom(value); } else { timeOffset_ = value; } } else { timeOffsetBuilder_.mergeFrom(value); } if (timeOffset_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The timestamp of the frame in microseconds. 
* </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public Builder clearTimeOffset() { bitField0_ = (bitField0_ & ~0x00000002); timeOffset_ = null; if (timeOffsetBuilder_ != null) { timeOffsetBuilder_.dispose(); timeOffsetBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public com.google.protobuf.Duration.Builder getTimeOffsetBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTimeOffsetFieldBuilder().getBuilder(); } /** * * * <pre> * The timestamp of the frame in microseconds. * </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ public com.google.protobuf.DurationOrBuilder getTimeOffsetOrBuilder() { if (timeOffsetBuilder_ != null) { return timeOffsetBuilder_.getMessageOrBuilder(); } else { return timeOffset_ == null ? com.google.protobuf.Duration.getDefaultInstance() : timeOffset_; } } /** * * * <pre> * The timestamp of the frame in microseconds. 
* </pre> * * <code>.google.protobuf.Duration time_offset = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getTimeOffsetFieldBuilder() { if (timeOffsetBuilder_ == null) { timeOffsetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getTimeOffset(), getParentForChildren(), isClean()); timeOffset_ = null; } return timeOffsetBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) } // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame) private static final com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame(); } public static com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ObjectTrackingFrame> PARSER = new com.google.protobuf.AbstractParser<ObjectTrackingFrame>() { @java.lang.Override public ObjectTrackingFrame parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ObjectTrackingFrame> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ObjectTrackingFrame> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
35,349
hotspot/agent/src/share/classes/sun/jvm/hotspot/HotSpotTypeDataBase.java
/* * Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ package sun.jvm.hotspot; import java.io.*; import java.util.*; import sun.jvm.hotspot.debugger.*; import sun.jvm.hotspot.types.*; import sun.jvm.hotspot.types.basic.*; import sun.jvm.hotspot.utilities.*; /** <P> This is the cross-platform TypeDataBase used by the Oop hierarchy. The decision was made to make this cross-platform by having the VM export the necessary symbols via a built-in table; see src/share/vm/runtime/vmStructs.[ch]pp for more details. </P> <P> <B>WARNING</B>: clients should refer to this class through the TypeDataBase interface and not directly to the HotSpotTypeDataBase type. 
</P> <P> NOTE: since we are fetching the sizes of the Java primitive types */ public class HotSpotTypeDataBase extends BasicTypeDataBase { private Debugger symbolLookup; private String[] jvmLibNames; private static final int UNINITIALIZED_SIZE = -1; private static final int C_INT8_SIZE = 1; private static final int C_INT32_SIZE = 4; private static final int C_INT64_SIZE = 8; private static int pointerSize = UNINITIALIZED_SIZE; private static final boolean DEBUG; static { DEBUG = System.getProperty("sun.jvm.hotspot.HotSpotTypeDataBase.DEBUG") != null; } /** <P> This requires a SymbolLookup mechanism as well as the MachineDescription. Note that we do not need a NameMangler since we use the vmStructs mechanism to avoid looking up C++ symbols. </P> <P> NOTE that it is guaranteed that this constructor will not attempt to fetch any Java values from the remote process, only C integers and addresses. This is required because we are fetching the sizes of the Java primitive types from the remote process, implying that attempting to fetch them before their sizes are known is illegal. </P> <P> Throws NoSuchSymbolException if a problem occurred while looking up one of the bootstrapping symbols related to the VMStructs table in the remote VM; this may indicate that the remote process is not actually a HotSpot VM. 
</P> */ public HotSpotTypeDataBase(MachineDescription machDesc, VtblAccess vtblAccess, Debugger symbolLookup, String[] jvmLibNames) throws NoSuchSymbolException { super(machDesc, vtblAccess); this.symbolLookup = symbolLookup; this.jvmLibNames = jvmLibNames; readVMTypes(); initializePrimitiveTypes(); readVMStructs(); readVMIntConstants(); readVMLongConstants(); readExternalDefinitions(); } public Type lookupType(String cTypeName, boolean throwException) { Type fieldType = super.lookupType(cTypeName, false); if (fieldType == null && cTypeName.startsWith("const ")) { fieldType = (BasicType)lookupType(cTypeName.substring(6), false); } if (fieldType == null && cTypeName.endsWith(" const")) { fieldType = (BasicType)lookupType(cTypeName.substring(0, cTypeName.length() - 6), false); } if (fieldType == null) { if (cTypeName.startsWith("GrowableArray<") && cTypeName.endsWith(">")) { String ttype = cTypeName.substring("GrowableArray<".length(), cTypeName.length() - 1); Type templateType = lookupType(ttype, false); if (templateType == null && typeNameIsPointerType(ttype)) { templateType = recursiveCreateBasicPointerType(ttype); } if (templateType == null) { lookupOrFail(ttype); } BasicType basicTargetType = createBasicType(cTypeName, false, false, false); // transfer fields from GenericGrowableArray to template instance BasicType generic = lookupOrFail("GenericGrowableArray"); BasicType specific = lookupOrFail("GrowableArray<int>"); basicTargetType.setSize(specific.getSize()); Iterator fields = generic.getFields(); while (fields.hasNext()) { Field f = (Field)fields.next(); basicTargetType.addField(internalCreateField(basicTargetType, f.getName(), f.getType(), f.isStatic(), f.getOffset(), null)); } fieldType = basicTargetType; } } if (fieldType == null && typeNameIsPointerType(cTypeName)) { fieldType = recursiveCreateBasicPointerType(cTypeName); } if (fieldType == null && throwException) { super.lookupType(cTypeName, true); } return fieldType; } private void readVMTypes() { // 
Get the variables we need in order to traverse the VMTypeEntry[] long typeEntryTypeNameOffset; long typeEntrySuperclassNameOffset; long typeEntryIsOopTypeOffset; long typeEntryIsIntegerTypeOffset; long typeEntryIsUnsignedOffset; long typeEntrySizeOffset; long typeEntryArrayStride; // Fetch the address of the VMTypeEntry*. We get this symbol first // and try to use it to make sure that symbol lookup is working. Address entryAddr = lookupInProcess("gHotSpotVMTypes"); // System.err.println("gHotSpotVMTypes address = " + entryAddr); // Dereference this once to get the pointer to the first VMTypeEntry // dumpMemory(entryAddr, 80); entryAddr = entryAddr.getAddressAt(0); if (entryAddr == null) { throw new RuntimeException("gHotSpotVMTypes was not initialized properly in the remote process; can not continue"); } typeEntryTypeNameOffset = getLongValueFromProcess("gHotSpotVMTypeEntryTypeNameOffset"); typeEntrySuperclassNameOffset = getLongValueFromProcess("gHotSpotVMTypeEntrySuperclassNameOffset"); typeEntryIsOopTypeOffset = getLongValueFromProcess("gHotSpotVMTypeEntryIsOopTypeOffset"); typeEntryIsIntegerTypeOffset = getLongValueFromProcess("gHotSpotVMTypeEntryIsIntegerTypeOffset"); typeEntryIsUnsignedOffset = getLongValueFromProcess("gHotSpotVMTypeEntryIsUnsignedOffset"); typeEntrySizeOffset = getLongValueFromProcess("gHotSpotVMTypeEntrySizeOffset"); typeEntryArrayStride = getLongValueFromProcess("gHotSpotVMTypeEntryArrayStride"); // Start iterating down it until we find an entry with no name Address typeNameAddr = null; do { // Fetch the type name first typeNameAddr = entryAddr.getAddressAt(typeEntryTypeNameOffset); if (typeNameAddr != null) { String typeName = CStringUtilities.getString(typeNameAddr); String superclassName = null; Address superclassNameAddr = entryAddr.getAddressAt(typeEntrySuperclassNameOffset); if (superclassNameAddr != null) { superclassName = CStringUtilities.getString(superclassNameAddr); } boolean isOopType = 
(entryAddr.getCIntegerAt(typeEntryIsOopTypeOffset, C_INT32_SIZE, false) != 0); boolean isIntegerType = (entryAddr.getCIntegerAt(typeEntryIsIntegerTypeOffset, C_INT32_SIZE, false) != 0); boolean isUnsigned = (entryAddr.getCIntegerAt(typeEntryIsUnsignedOffset, C_INT32_SIZE, false) != 0); long size = entryAddr.getCIntegerAt(typeEntrySizeOffset, C_INT64_SIZE, true); createType(typeName, superclassName, isOopType, isIntegerType, isUnsigned, size); if (pointerSize == UNINITIALIZED_SIZE && typeName.equals("void*")) { pointerSize = (int)size; } } entryAddr = entryAddr.addOffsetTo(typeEntryArrayStride); } while (typeNameAddr != null); } private void initializePrimitiveTypes() { // Look up the needed primitive types by name...they had better be present setJBooleanType(lookupPrimitiveType("jboolean")); setJByteType (lookupPrimitiveType("jbyte")); setJCharType (lookupPrimitiveType("jchar")); setJDoubleType (lookupPrimitiveType("jdouble")); setJFloatType (lookupPrimitiveType("jfloat")); setJIntType (lookupPrimitiveType("jint")); setJLongType (lookupPrimitiveType("jlong")); setJShortType (lookupPrimitiveType("jshort")); // Indicate that these are the Java primitive types ((BasicType) getJBooleanType()).setIsJavaPrimitiveType(true); ((BasicType) getJByteType()).setIsJavaPrimitiveType(true); ((BasicType) getJCharType()).setIsJavaPrimitiveType(true); ((BasicType) getJDoubleType()).setIsJavaPrimitiveType(true); ((BasicType) getJFloatType()).setIsJavaPrimitiveType(true); ((BasicType) getJIntType()).setIsJavaPrimitiveType(true); ((BasicType) getJLongType()).setIsJavaPrimitiveType(true); ((BasicType) getJShortType()).setIsJavaPrimitiveType(true); } private Type lookupPrimitiveType(String typeName) { Type type = lookupType(typeName, false); if (type == null) { throw new RuntimeException("Error initializing the HotSpotDataBase: could not find the primitive type \"" + typeName + "\" in the remote VM's VMStructs table. 
This type is required in " + "order to determine the size of Java primitive types. Can not continue."); } return type; } private void readExternalDefinitions() { String file = System.getProperty("sun.jvm.hotspot.typedb"); if (file != null) { System.out.println("Reading " + file); BufferedReader in = null; try { StreamTokenizer t = new StreamTokenizer(in = new BufferedReader(new InputStreamReader(new FileInputStream(file)))); t.resetSyntax(); t.wordChars('\u0000','\uFFFF'); t.whitespaceChars(' ', ' '); t.whitespaceChars('\n', '\n'); t.whitespaceChars('\r', '\r'); t.quoteChar('\"'); t.eolIsSignificant(true); while (t.nextToken() != StreamTokenizer.TT_EOF) { if (t.ttype == StreamTokenizer.TT_EOL) { continue; } if (t.sval.equals("field")) { t.nextToken(); BasicType containingType = (BasicType)lookupType(t.sval); t.nextToken(); String fieldName = t.sval; // The field's Type must already be in the database -- no exceptions t.nextToken(); Type fieldType = lookupType(t.sval); t.nextToken(); boolean isStatic = Boolean.valueOf(t.sval).booleanValue(); t.nextToken(); long offset = Long.parseLong(t.sval); t.nextToken(); Address staticAddress = null; if (isStatic) { throw new InternalError("static fields not supported"); } // check to see if the field already exists Iterator i = containingType.getFields(); boolean defined = false; while (i.hasNext()) { Field f = (Field) i.next(); if (f.getName().equals(fieldName)) { if (f.isStatic() != isStatic) { throw new RuntimeException("static/nonstatic mismatch: " + fieldName); } if (!isStatic) { if (f.getOffset() != offset) { throw new RuntimeException("bad redefinition of field offset: " + fieldName); } } else { if (!f.getStaticFieldAddress().equals(staticAddress)) { throw new RuntimeException("bad redefinition of field location: " + fieldName); } } if (f.getType() != fieldType) { System.out.println(fieldType); System.out.println(f.getType()); throw new RuntimeException("bad redefinition of field type: " + fieldName); } defined = true; 
break; } } if (!defined) { // Create field by type createField(containingType, fieldName, fieldType, isStatic, offset, staticAddress); } } else if (t.sval.equals("type")) { t.nextToken(); String typeName = t.sval; t.nextToken(); String superclassName = t.sval; if (superclassName.equals("null")) { superclassName = null; } t.nextToken(); boolean isOop = Boolean.valueOf(t.sval).booleanValue(); t.nextToken(); boolean isInteger = Boolean.valueOf(t.sval).booleanValue(); t.nextToken(); boolean isUnsigned = Boolean.valueOf(t.sval).booleanValue(); t.nextToken(); long size = Long.parseLong(t.sval); BasicType type = null; try { type = (BasicType)lookupType(typeName); } catch (RuntimeException e) { } if (type != null) { if (type.isOopType() != isOop) { throw new RuntimeException("oop mismatch in type definition: " + typeName); } if (type.isCIntegerType() != isInteger) { throw new RuntimeException("integer type mismatch in type definition: " + typeName); } if (type.isCIntegerType() && (((CIntegerType)type).isUnsigned()) != isUnsigned) { throw new RuntimeException("unsigned mismatch in type definition: " + typeName); } if (type.getSuperclass() == null) { if (superclassName != null) { if (type.getSize() == -1) { type.setSuperclass(lookupType(superclassName)); } else { throw new RuntimeException("unexpected superclass in type definition: " + typeName); } } } else { if (superclassName == null) { throw new RuntimeException("missing superclass in type definition: " + typeName); } if (!type.getSuperclass().getName().equals(superclassName)) { throw new RuntimeException("incorrect superclass in type definition: " + typeName); } } if (type.getSize() != size) { if (type.getSize() == -1 || type.getSize() == 0) { type.setSize(size); } else { throw new RuntimeException("size mismatch in type definition: " + typeName + ": " + type.getSize() + " != " + size); } } } if (lookupType(typeName, false) == null) { // Create type createType(typeName, superclassName, isOop, isInteger, isUnsigned, 
size); } } else { throw new InternalError("\"" + t.sval + "\""); } } } catch (IOException ioe) { ioe.printStackTrace(); } finally { try { in.close(); } catch (Exception e) { } } } } private void readVMStructs() { // Get the variables we need in order to traverse the VMStructEntry[] long structEntryTypeNameOffset; long structEntryFieldNameOffset; long structEntryTypeStringOffset; long structEntryIsStaticOffset; long structEntryOffsetOffset; long structEntryAddressOffset; long structEntryArrayStride; structEntryTypeNameOffset = getLongValueFromProcess("gHotSpotVMStructEntryTypeNameOffset"); structEntryFieldNameOffset = getLongValueFromProcess("gHotSpotVMStructEntryFieldNameOffset"); structEntryTypeStringOffset = getLongValueFromProcess("gHotSpotVMStructEntryTypeStringOffset"); structEntryIsStaticOffset = getLongValueFromProcess("gHotSpotVMStructEntryIsStaticOffset"); structEntryOffsetOffset = getLongValueFromProcess("gHotSpotVMStructEntryOffsetOffset"); structEntryAddressOffset = getLongValueFromProcess("gHotSpotVMStructEntryAddressOffset"); structEntryArrayStride = getLongValueFromProcess("gHotSpotVMStructEntryArrayStride"); // Fetch the address of the VMStructEntry* Address entryAddr = lookupInProcess("gHotSpotVMStructs"); // Dereference this once to get the pointer to the first VMStructEntry entryAddr = entryAddr.getAddressAt(0); if (entryAddr == null) { throw new RuntimeException("gHotSpotVMStructs was not initialized properly in the remote process; can not continue"); } // Start iterating down it until we find an entry with no name Address fieldNameAddr = null; String typeName = null; String fieldName = null; String typeString = null; boolean isStatic = false; long offset = 0; Address staticFieldAddr = null; long size = 0; long index = 0; String opaqueName = "<opaque>"; lookupOrCreateClass(opaqueName, false, false, false); do { // Fetch the field name first fieldNameAddr = entryAddr.getAddressAt(structEntryFieldNameOffset); if (fieldNameAddr != null) { fieldName 
= CStringUtilities.getString(fieldNameAddr); // Now the rest of the names. Keep in mind that the type name // may be NULL, indicating that the type is opaque. Address addr = entryAddr.getAddressAt(structEntryTypeNameOffset); if (addr == null) { throw new RuntimeException("gHotSpotVMStructs unexpectedly had a NULL type name at index " + index); } typeName = CStringUtilities.getString(addr); addr = entryAddr.getAddressAt(structEntryTypeStringOffset); if (addr == null) { typeString = opaqueName; } else { typeString = CStringUtilities.getString(addr); } isStatic = !(entryAddr.getCIntegerAt(structEntryIsStaticOffset, C_INT32_SIZE, false) == 0); if (isStatic) { staticFieldAddr = entryAddr.getAddressAt(structEntryAddressOffset); offset = 0; } else { offset = entryAddr.getCIntegerAt(structEntryOffsetOffset, C_INT64_SIZE, true); staticFieldAddr = null; } // The containing Type must already be in the database -- no exceptions BasicType containingType = lookupOrFail(typeName); // The field's Type must already be in the database -- no exceptions BasicType fieldType = (BasicType)lookupType(typeString); // Create field by type createField(containingType, fieldName, fieldType, isStatic, offset, staticFieldAddr); } ++index; entryAddr = entryAddr.addOffsetTo(structEntryArrayStride); } while (fieldNameAddr != null); } private void readVMIntConstants() { // Get the variables we need in order to traverse the VMIntConstantEntry[] long intConstantEntryNameOffset; long intConstantEntryValueOffset; long intConstantEntryArrayStride; intConstantEntryNameOffset = getLongValueFromProcess("gHotSpotVMIntConstantEntryNameOffset"); intConstantEntryValueOffset = getLongValueFromProcess("gHotSpotVMIntConstantEntryValueOffset"); intConstantEntryArrayStride = getLongValueFromProcess("gHotSpotVMIntConstantEntryArrayStride"); // Fetch the address of the VMIntConstantEntry* Address entryAddr = lookupInProcess("gHotSpotVMIntConstants"); // Dereference this once to get the pointer to the first 
VMIntConstantEntry entryAddr = entryAddr.getAddressAt(0); if (entryAddr == null) { throw new RuntimeException("gHotSpotVMIntConstants was not initialized properly in the remote process; can not continue"); } // Start iterating down it until we find an entry with no name Address nameAddr = null; do { // Fetch the type name first nameAddr = entryAddr.getAddressAt(intConstantEntryNameOffset); if (nameAddr != null) { String name = CStringUtilities.getString(nameAddr); int value = (int) entryAddr.getCIntegerAt(intConstantEntryValueOffset, C_INT32_SIZE, false); // Be a little resilient Integer oldValue = lookupIntConstant(name, false); if (oldValue == null) { addIntConstant(name, value); } else { if (oldValue.intValue() != value) { throw new RuntimeException("Error: the integer constant \"" + name + "\" had its value redefined (old was " + oldValue + ", new is " + value + ". Aborting."); } else { System.err.println("Warning: the int constant \"" + name + "\" (declared in the remote VM in VMStructs::localHotSpotVMIntConstants) " + "had its value declared as " + value + " twice. 
Continuing."); } } } entryAddr = entryAddr.addOffsetTo(intConstantEntryArrayStride); } while (nameAddr != null); } private void readVMLongConstants() { // Get the variables we need in order to traverse the VMLongConstantEntry[] long longConstantEntryNameOffset; long longConstantEntryValueOffset; long longConstantEntryArrayStride; longConstantEntryNameOffset = getLongValueFromProcess("gHotSpotVMLongConstantEntryNameOffset"); longConstantEntryValueOffset = getLongValueFromProcess("gHotSpotVMLongConstantEntryValueOffset"); longConstantEntryArrayStride = getLongValueFromProcess("gHotSpotVMLongConstantEntryArrayStride"); // Fetch the address of the VMLongConstantEntry* Address entryAddr = lookupInProcess("gHotSpotVMLongConstants"); // Dereference this once to get the pointer to the first VMLongConstantEntry entryAddr = entryAddr.getAddressAt(0); if (entryAddr == null) { throw new RuntimeException("gHotSpotVMLongConstants was not initialized properly in the remote process; can not continue"); } // Start iterating down it until we find an entry with no name Address nameAddr = null; do { // Fetch the type name first nameAddr = entryAddr.getAddressAt(longConstantEntryNameOffset); if (nameAddr != null) { String name = CStringUtilities.getString(nameAddr); int value = (int) entryAddr.getCIntegerAt(longConstantEntryValueOffset, C_INT64_SIZE, true); // Be a little resilient Long oldValue = lookupLongConstant(name, false); if (oldValue == null) { addLongConstant(name, value); } else { if (oldValue.longValue() != value) { throw new RuntimeException("Error: the long constant \"" + name + "\" had its value redefined (old was " + oldValue + ", new is " + value + ". Aborting."); } else { System.err.println("Warning: the long constant \"" + name + "\" (declared in the remote VM in VMStructs::localHotSpotVMLongConstants) " + "had its value declared as " + value + " twice. 
Continuing."); } } } entryAddr = entryAddr.addOffsetTo(longConstantEntryArrayStride); } while (nameAddr != null); } private BasicType lookupOrFail(String typeName) { BasicType type = (BasicType) lookupType(typeName, false); if (type == null) { throw new RuntimeException("Type \"" + typeName + "\", referenced in VMStructs::localHotSpotVMStructs in the remote VM, " + "was not present in the remote VMStructs::localHotSpotVMTypes table (should have been caught " + "in the debug build of that VM). Can not continue."); } return type; } private long getLongValueFromProcess(String symbol) { return lookupInProcess(symbol).getCIntegerAt(0, C_INT64_SIZE, true); } private Address lookupInProcess(String symbol) throws NoSuchSymbolException { // FIXME: abstract away the loadobject name for (int i = 0; i < jvmLibNames.length; i++) { Address addr = symbolLookup.lookup(jvmLibNames[i], symbol); if (addr != null) { return addr; } } String errStr = "("; for (int i = 0; i < jvmLibNames.length; i++) { errStr += jvmLibNames[i]; if (i < jvmLibNames.length - 1) { errStr += ", "; } } errStr += ")"; throw new NoSuchSymbolException(symbol, "Could not find symbol \"" + symbol + "\" in any of the known library names " + errStr); } private BasicType lookupOrCreateClass(String typeName, boolean isOopType, boolean isIntegerType, boolean isUnsigned) { BasicType type = (BasicType) lookupType(typeName, false); if (type == null) { // Create a new type type = createBasicType(typeName, isOopType, isIntegerType, isUnsigned); } return type; } /** Creates a new BasicType, initializes its size to -1 so we can test to ensure that all types' sizes are initialized by VMTypes, and adds it to the database. Takes care of initializing integer and oop types properly. 
*/ private BasicType createBasicType(String typeName, boolean isOopType, boolean isIntegerType, boolean isUnsigned) { BasicType type = null; if (isIntegerType) { type = new BasicCIntegerType(this, typeName, isUnsigned); } else { if (typeNameIsPointerType(typeName)) { type = recursiveCreateBasicPointerType(typeName); } else { type = new BasicType(this, typeName); } if (isOopType) { // HACK: turn markOop into a C integer type. This allows // proper handling of it in the Serviceability Agent. (FIXME // -- consider doing something different here) if (typeName.equals("markOop")) { type = new BasicCIntegerType(this, typeName, true); } else { type.setIsOopType(true); } } } type.setSize(UNINITIALIZED_SIZE); addType(type); return type; } /** Recursively creates a PointerType from the string representation of the type's name. Note that this currently needs some workarounds due to incomplete information in the VMStructs database. */ private BasicPointerType recursiveCreateBasicPointerType(String typeName) { BasicPointerType result = (BasicPointerType)super.lookupType(typeName, false); if (result != null) { return result; } String targetTypeName = typeName.substring(0, typeName.lastIndexOf('*')).trim(); Type targetType = null; if (typeNameIsPointerType(targetTypeName)) { targetType = lookupType(targetTypeName, false); if (targetType == null) { targetType = recursiveCreateBasicPointerType(targetTypeName); } } else { targetType = lookupType(targetTypeName, false); if (targetType == null) { // Workaround for missing C integer types in database. // Also looks like we can't throw an exception for other // missing target types because there are some in old // VMStructs tables that didn't have the target type declared. // For this case, we create basic types that never get filled // in. 
if (targetTypeName.equals("char") || targetTypeName.equals("const char")) { // We don't have a representation of const-ness of C types in the SA BasicType basicTargetType = createBasicType(targetTypeName, false, true, false); basicTargetType.setSize(1); targetType = basicTargetType; } else if (targetTypeName.equals("u_char")) { BasicType basicTargetType = createBasicType(targetTypeName, false, true, true); basicTargetType.setSize(1); targetType = basicTargetType; } else { if (DEBUG) { System.err.println("WARNING: missing target type \"" + targetTypeName + "\" for pointer type \"" + typeName + "\""); } targetType = createBasicType(targetTypeName, false, false, false); } } } result = new BasicPointerType(this, typeName, targetType); if (pointerSize == UNINITIALIZED_SIZE && !typeName.equals("void*")) { // void* must be declared early so that other pointer types can use that to set their size. throw new InternalError("void* type hasn't been seen when parsing " + typeName); } result.setSize(pointerSize); addType(result); return result; } private boolean typeNameIsPointerType(String typeName) { int i = typeName.length() - 1; while (i >= 0 && Character.isWhitespace(typeName.charAt(i))) { --i; } if (i >= 0 && typeName.charAt(i) == '*') { return true; } return false; } public void createType(String typeName, String superclassName, boolean isOopType, boolean isIntegerType, boolean isUnsigned, long size) { // See whether we have a superclass BasicType superclass = null; if (superclassName != null) { // Fetch or create it (FIXME: would get oop types wrong if // they had a hierarchy; consider using lookupOrFail) superclass = lookupOrCreateClass(superclassName, false, false, false); } // Lookup or create the current type BasicType curType = lookupOrCreateClass(typeName, isOopType, isIntegerType, isUnsigned); // Set superclass and/or ensure it's correct if (superclass != null) { if (curType.getSuperclass() == null) { // Set the superclass in the current type 
curType.setSuperclass(superclass); } if (curType.getSuperclass() != superclass) { throw new RuntimeException("Error: the type \"" + typeName + "\" (declared in the remote VM in VMStructs::localHotSpotVMTypes) " + "had its superclass redefined (old was " + curType.getSuperclass().getName() + ", new is " + superclass.getName() + ")."); } } // Classes are created with a size of UNINITIALIZED_SIZE. // Set size if necessary. if (curType.getSize() == UNINITIALIZED_SIZE || curType.getSize() == 0) { curType.setSize(size); } else { if (curType.getSize() != size) { throw new RuntimeException("Error: the type \"" + typeName + "\" (declared in the remote VM in VMStructs::localHotSpotVMTypes) " + "had its size redefined (old was " + curType.getSize() + ", new is " + size + ")."); } if (!typeNameIsPointerType(typeName)) { System.err.println("Warning: the type \"" + typeName + "\" (declared in the remote VM in VMStructs::localHotSpotVMTypes) " + "had its size declared as " + size + " twice. Continuing."); } } } /** "Virtual constructor" for fields based on type */ public void createField(BasicType containingType, String name, Type type, boolean isStatic, long offset, Address staticFieldAddress) { // Add field to containing type containingType.addField(internalCreateField(containingType, name, type, isStatic, offset, staticFieldAddress)); } Field internalCreateField(BasicType containingType, String name, Type type, boolean isStatic, long offset, Address staticFieldAddress) { // "Virtual constructor" based on type if (type.isOopType()) { return new BasicOopField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type instanceof CIntegerType) { return new BasicCIntegerField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJBooleanType())) { return new BasicJBooleanField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJByteType())) { return new BasicJByteField(this, 
containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJCharType())) { return new BasicJCharField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJDoubleType())) { return new BasicJDoubleField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJFloatType())) { return new BasicJFloatField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJIntType())) { return new BasicJIntField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJLongType())) { return new BasicJLongField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } if (type.equals(getJShortType())) { return new BasicJShortField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } // Unknown ("opaque") type. Instantiate ordinary Field. return new BasicField(this, containingType, name, type, isStatic, offset, staticFieldAddress); } // For debugging private void dumpMemory(Address addr, int len) { int i = 0; while (i < len) { System.err.print(addr.addOffsetTo(i) + ":"); for (int j = 0; j < 8 && i < len; i++, j++) { String s = Long.toHexString(addr.getCIntegerAt(i, 1, true)); System.err.print(" 0x"); for (int k = 0; k < 2 - s.length(); k++) { System.err.print("0"); } System.err.print(s); } System.err.println(); } } }
googleapis/google-cloud-java
35,370
java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/FailoverInstanceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1beta; /** * * * <pre> * Message for triggering failover on an Instance * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.FailoverInstanceRequest} */ public final class FailoverInstanceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.FailoverInstanceRequest) FailoverInstanceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use FailoverInstanceRequest.newBuilder() to construct. 
private FailoverInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FailoverInstanceRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FailoverInstanceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_FailoverInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_FailoverInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.class, com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. 
* * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1beta.FailoverInstanceRequest)) { return super.equals(obj); } com.google.cloud.alloydb.v1beta.FailoverInstanceRequest other = (com.google.cloud.alloydb.v1beta.FailoverInstanceRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.alloydb.v1beta.FailoverInstanceRequest 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for triggering failover on an Instance * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.FailoverInstanceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.FailoverInstanceRequest) com.google.cloud.alloydb.v1beta.FailoverInstanceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_FailoverInstanceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_FailoverInstanceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.class, com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.Builder.class); } // Construct using com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1beta.ServiceProto 
.internal_static_google_cloud_alloydb_v1beta_FailoverInstanceRequest_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1beta.FailoverInstanceRequest getDefaultInstanceForType() { return com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1beta.FailoverInstanceRequest build() { com.google.cloud.alloydb.v1beta.FailoverInstanceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1beta.FailoverInstanceRequest buildPartial() { com.google.cloud.alloydb.v1beta.FailoverInstanceRequest result = new com.google.cloud.alloydb.v1beta.FailoverInstanceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1beta.FailoverInstanceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1beta.FailoverInstanceRequest) { return mergeFrom((com.google.cloud.alloydb.v1beta.FailoverInstanceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1beta.FailoverInstanceRequest other) { if (other == com.google.cloud.alloydb.v1beta.FailoverInstanceRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { 
onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. 
* </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Instance.name field. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. 
*/ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. 
Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. 
*/ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.FailoverInstanceRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.FailoverInstanceRequest) private static final com.google.cloud.alloydb.v1beta.FailoverInstanceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.FailoverInstanceRequest(); } public static com.google.cloud.alloydb.v1beta.FailoverInstanceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FailoverInstanceRequest> PARSER = new com.google.protobuf.AbstractParser<FailoverInstanceRequest>() { @java.lang.Override public FailoverInstanceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } 
catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FailoverInstanceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FailoverInstanceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1beta.FailoverInstanceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,384
java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/EvaluateProcessorVersionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1; /** * * * <pre> * Evaluates the given * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] against the * supplied documents. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.EvaluateProcessorVersionRequest} */ public final class EvaluateProcessorVersionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.EvaluateProcessorVersionRequest) EvaluateProcessorVersionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use EvaluateProcessorVersionRequest.newBuilder() to construct. 
private EvaluateProcessorVersionRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private EvaluateProcessorVersionRequest() { processorVersion_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new EvaluateProcessorVersionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_EvaluateProcessorVersionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_EvaluateProcessorVersionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.class, com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.Builder.class); } private int bitField0_; public static final int PROCESSOR_VERSION_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object processorVersion_ = ""; /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The processorVersion. 
*/ @java.lang.Override public java.lang.String getProcessorVersion() { java.lang.Object ref = processorVersion_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); processorVersion_ = s; return s; } } /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for processorVersion. */ @java.lang.Override public com.google.protobuf.ByteString getProcessorVersionBytes() { java.lang.Object ref = processorVersion_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); processorVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int EVALUATION_DOCUMENTS_FIELD_NUMBER = 3; private com.google.cloud.documentai.v1.BatchDocumentsInputConfig evaluationDocuments_; /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the evaluationDocuments field is set. */ @java.lang.Override public boolean hasEvaluationDocuments() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. 
* </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The evaluationDocuments. */ @java.lang.Override public com.google.cloud.documentai.v1.BatchDocumentsInputConfig getEvaluationDocuments() { return evaluationDocuments_ == null ? com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance() : evaluationDocuments_; } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder getEvaluationDocumentsOrBuilder() { return evaluationDocuments_ == null ? com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance() : evaluationDocuments_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processorVersion_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, processorVersion_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getEvaluationDocuments()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(processorVersion_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, processorVersion_); } if (((bitField0_ & 0x00000001) != 
0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getEvaluationDocuments()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest)) { return super.equals(obj); } com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest other = (com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest) obj; if (!getProcessorVersion().equals(other.getProcessorVersion())) return false; if (hasEvaluationDocuments() != other.hasEvaluationDocuments()) return false; if (hasEvaluationDocuments()) { if (!getEvaluationDocuments().equals(other.getEvaluationDocuments())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROCESSOR_VERSION_FIELD_NUMBER; hash = (53 * hash) + getProcessorVersion().hashCode(); if (hasEvaluationDocuments()) { hash = (37 * hash) + EVALUATION_DOCUMENTS_FIELD_NUMBER; hash = (53 * hash) + getEvaluationDocuments().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest 
parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, 
extensionRegistry); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Evaluates the given * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] against the * supplied documents. 
* </pre> * * Protobuf type {@code google.cloud.documentai.v1.EvaluateProcessorVersionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.EvaluateProcessorVersionRequest) com.google.cloud.documentai.v1.EvaluateProcessorVersionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_EvaluateProcessorVersionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_EvaluateProcessorVersionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.class, com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.Builder.class); } // Construct using com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEvaluationDocumentsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; processorVersion_ = ""; evaluationDocuments_ = null; if (evaluationDocumentsBuilder_ != null) { evaluationDocumentsBuilder_.dispose(); evaluationDocumentsBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1.DocumentAiProcessorService 
.internal_static_google_cloud_documentai_v1_EvaluateProcessorVersionRequest_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest getDefaultInstanceForType() { return com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest build() { com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest buildPartial() { com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest result = new com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.processorVersion_ = processorVersion_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.evaluationDocuments_ = evaluationDocumentsBuilder_ == null ? 
evaluationDocuments_ : evaluationDocumentsBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest) { return mergeFrom((com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest other) { if (other == com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest.getDefaultInstance()) return this; if (!other.getProcessorVersion().isEmpty()) { processorVersion_ = other.processorVersion_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasEvaluationDocuments()) { mergeEvaluationDocuments(other.getEvaluationDocuments()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { processorVersion_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 26: { input.readMessage( getEvaluationDocumentsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object processorVersion_ = ""; /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The processorVersion. */ public java.lang.String getProcessorVersion() { java.lang.Object ref = processorVersion_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); processorVersion_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. 
* `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for processorVersion. */ public com.google.protobuf.ByteString getProcessorVersionBytes() { java.lang.Object ref = processorVersion_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); processorVersion_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The processorVersion to set. * @return This builder for chaining. */ public Builder setProcessorVersion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } processorVersion_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearProcessorVersion() { processorVersion_ = getDefaultInstance().getProcessorVersion(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
The resource name of the * [ProcessorVersion][google.cloud.documentai.v1.ProcessorVersion] to * evaluate. * `projects/{project}/locations/{location}/processors/{processor}/processorVersions/{processorVersion}` * </pre> * * <code> * string processor_version = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for processorVersion to set. * @return This builder for chaining. */ public Builder setProcessorVersionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); processorVersion_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.documentai.v1.BatchDocumentsInputConfig evaluationDocuments_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.BatchDocumentsInputConfig, com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder, com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder> evaluationDocumentsBuilder_; /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the evaluationDocuments field is set. */ public boolean hasEvaluationDocuments() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The evaluationDocuments. */ public com.google.cloud.documentai.v1.BatchDocumentsInputConfig getEvaluationDocuments() { if (evaluationDocumentsBuilder_ == null) { return evaluationDocuments_ == null ? 
com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance() : evaluationDocuments_; } else { return evaluationDocumentsBuilder_.getMessage(); } } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setEvaluationDocuments( com.google.cloud.documentai.v1.BatchDocumentsInputConfig value) { if (evaluationDocumentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } evaluationDocuments_ = value; } else { evaluationDocumentsBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setEvaluationDocuments( com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder builderForValue) { if (evaluationDocumentsBuilder_ == null) { evaluationDocuments_ = builderForValue.build(); } else { evaluationDocumentsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. 
* </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeEvaluationDocuments( com.google.cloud.documentai.v1.BatchDocumentsInputConfig value) { if (evaluationDocumentsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && evaluationDocuments_ != null && evaluationDocuments_ != com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance()) { getEvaluationDocumentsBuilder().mergeFrom(value); } else { evaluationDocuments_ = value; } } else { evaluationDocumentsBuilder_.mergeFrom(value); } if (evaluationDocuments_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearEvaluationDocuments() { bitField0_ = (bitField0_ & ~0x00000002); evaluationDocuments_ = null; if (evaluationDocumentsBuilder_ != null) { evaluationDocumentsBuilder_.dispose(); evaluationDocumentsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder getEvaluationDocumentsBuilder() { bitField0_ |= 0x00000002; onChanged(); return getEvaluationDocumentsFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. 
* </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder getEvaluationDocumentsOrBuilder() { if (evaluationDocumentsBuilder_ != null) { return evaluationDocumentsBuilder_.getMessageOrBuilder(); } else { return evaluationDocuments_ == null ? com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance() : evaluationDocuments_; } } /** * * * <pre> * Optional. The documents used in the evaluation. If unspecified, use the * processor's dataset as evaluation input. * </pre> * * <code> * .google.cloud.documentai.v1.BatchDocumentsInputConfig evaluation_documents = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.BatchDocumentsInputConfig, com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder, com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder> getEvaluationDocumentsFieldBuilder() { if (evaluationDocumentsBuilder_ == null) { evaluationDocumentsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.BatchDocumentsInputConfig, com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder, com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder>( getEvaluationDocuments(), getParentForChildren(), isClean()); evaluationDocuments_ = null; } return evaluationDocumentsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.EvaluateProcessorVersionRequest) } // 
@@protoc_insertion_point(class_scope:google.cloud.documentai.v1.EvaluateProcessorVersionRequest) private static final com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest(); } public static com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<EvaluateProcessorVersionRequest> PARSER = new com.google.protobuf.AbstractParser<EvaluateProcessorVersionRequest>() { @java.lang.Override public EvaluateProcessorVersionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<EvaluateProcessorVersionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<EvaluateProcessorVersionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1.EvaluateProcessorVersionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googlearchive/android-fido
35,579
app/src/main/java/com/fido/example/fido2apiexample/Fido2DemoActivity.java
/* * Copyright 2017 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.fido.example.fido2apiexample; import android.Manifest; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.content.SharedPreferences; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Bundle; import android.preference.PreferenceManager; import android.support.annotation.NonNull; import android.support.design.widget.NavigationView; import android.support.multidex.MultiDex; import android.support.v4.app.ActivityCompat; import android.support.v4.content.ContextCompat; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.ProgressBar; import android.widget.TextView; import android.widget.Toast; import com.google.android.gms.auth.api.Auth; import com.google.android.gms.auth.api.signin.GoogleSignInAccount; import com.google.android.gms.auth.api.signin.GoogleSignInOptions; import 
com.google.android.gms.auth.api.signin.GoogleSignInResult; import com.google.android.gms.common.ConnectionResult; import com.google.android.gms.common.SignInButton; import com.google.android.gms.common.api.GoogleApiClient; import com.google.android.gms.common.api.OptionalPendingResult; import com.google.android.gms.common.api.ResultCallback; import com.google.android.gms.common.api.Status; import com.google.android.gms.fido.Fido; import com.google.android.gms.fido.fido2.Fido2ApiClient; import com.google.android.gms.fido.fido2.Fido2PendingIntent; import com.google.android.gms.fido.fido2.api.common.AuthenticatorAssertionResponse; import com.google.android.gms.fido.fido2.api.common.AuthenticatorAttestationResponse; import com.google.android.gms.fido.fido2.api.common.AuthenticatorErrorResponse; import com.google.android.gms.fido.fido2.api.common.PublicKeyCredentialCreationOptions; import com.google.android.gms.fido.fido2.api.common.PublicKeyCredentialRequestOptions; import com.google.android.gms.tasks.OnCompleteListener; import com.google.android.gms.tasks.OnSuccessListener; import com.google.android.gms.tasks.Task; import com.google.android.gms.tasks.Tasks; import com.google.common.base.Strings; import com.google.common.collect.FluentIterable; import com.google.common.io.BaseEncoding; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.LinkedBlockingDeque; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import org.json.JSONException; import org.json.JSONObject; /** * Activity that conducts registration and authentication operations against WebAuthn demo server. 
*/
public class Fido2DemoActivity extends AppCompatActivity
    implements NavigationView.OnNavigationItemSelectedListener,
        GoogleApiClient.OnConnectionFailedListener,
        View.OnClickListener {

  private static final String TAG = "Fido2DemoActivity";

  // JSON keys used when parsing the server's sign result and the locally
  // cached token maps (see highlightAuthenticatedToken / SecurityTokenAdapter).
  private static final String KEY_KEY_HANDLE = "handle";
  private static final String KEY_CREDENTIAL = "credential";
  private static final String KEY_CREDENTIAL_ID = "id";

  // startActivityForResult request codes: Google Sign-In, FIDO2 registration,
  // and FIDO2 assertion (sign) respectively.
  private static final int RC_SIGN_IN = 9001;
  private static final int REQUEST_CODE_REGISTER = 0;
  private static final int REQUEST_CODE_SIGN = 1;

  // Runtime-permission request codes for GET_ACCOUNTS, one per operation that
  // may need to re-trigger itself from onRequestPermissionsResult.
  private static final int GET_ACCOUNTS_PERMISSIONS_REQUEST_REGISTER = 0x11;
  private static final int GET_ACCOUNTS_PERMISSIONS_REQUEST_SIGN = 0x13;
  private static final int GET_ACCOUNTS_PERMISSIONS_ALL_TOKENS = 0x15;

  // Create a new ThreadPoolExecutor with 2 threads for each processor on the
  // device and a 60 second keep-alive time.
  private static final int NUM_CORES = Runtime.getRuntime().availableProcessors();
  private static final ThreadPoolExecutor THREAD_POOL_EXECUTOR =
      new ThreadPoolExecutor(
          NUM_CORES * 2,
          NUM_CORES * 2,
          60L,
          TimeUnit.SECONDS,
          new LinkedBlockingDeque<Runnable>());

  // Main-layout widgets.
  private ProgressBar progressBar;
  private SwipeRefreshLayout swipeRefreshLayout;
  private RecyclerView recyclerView;
  private SecurityTokenAdapter adapter;
  // Registered security tokens as returned by the server; refreshed by
  // updateAndDisplayRegisteredKeys().
  private List<Map<String, String>> securityTokens;
  private SignInButton signInButton;

  // Navigation-drawer header and menu items toggled by sign-in state.
  private TextView userEmailTextView;
  private TextView displayNameTextView;
  private MenuItem operationMenuItem;
  private MenuItem signInMenuItem;
  private MenuItem signOutMenuItem;

  private GoogleApiClient googleApiClient;
  // Lazily (re)created on background threads; reset to null on sign-out.
  private GAEService gaeService;
  private GoogleSignInAccount googleSignInAccount;

  /**
   * Builds the auto-managed Google Sign-In client, wires up the toolbar, swipe-to-refresh token
   * list, and navigation drawer, then either shows the sign-in button or the registered keys.
   */
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_navigation);

    // START Google sign in API client
    // configure sign-in to request user info
    GoogleSignInOptions gso =
        new GoogleSignInOptions.Builder(GoogleSignInOptions.DEFAULT_SIGN_IN)
            .requestEmail()
            .requestIdToken(Constants.SERVER_CLIENT_ID)
            .build();
    // build client with access to Google Sign-In API and the options specified by gso
    googleApiClient =
        new GoogleApiClient.Builder(this)
            .enableAutoManage(this /* FragmentActivity */, this /* OnConnectionFailedListener */)
            .addApi(Auth.GOOGLE_SIGN_IN_API, gso)
            .build();
    // END Google sign in API client

    // START prepare main layout
    Toolbar toolbar = findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    progressBar = findViewById(R.id.progressBar);
    swipeRefreshLayout = findViewById(R.id.swipe_container);
    swipeRefreshLayout.setColorSchemeColors(getResources().getColor(R.color.colorAccent));
    swipeRefreshLayout.setRefreshing(true);
    swipeRefreshLayout.setOnRefreshListener(
        new SwipeRefreshLayout.OnRefreshListener() {
          @Override
          public void onRefresh() {
            updateAndDisplayRegisteredKeys();
          }
        });
    recyclerView = findViewById(R.id.list);
    recyclerView.setLayoutManager(new LinearLayoutManager(this));
    adapter =
        new SecurityTokenAdapter(
            new ArrayList<Map<String, String>>(), R.layout.row_token, Fido2DemoActivity.this);
    // END prepare main layout

    // START prepare drawer layout
    DrawerLayout drawer = findViewById(R.id.drawer_layout);
    ActionBarDrawerToggle toggle =
        new ActionBarDrawerToggle(
            this, drawer, toolbar, R.string.navigation_drawer_open,
            R.string.navigation_drawer_close);
    // NOTE(review): setDrawerListener is deprecated in favor of addDrawerListener — confirm
    // support-library version before changing.
    drawer.setDrawerListener(toggle);
    toggle.syncState();

    NavigationView navigationView = findViewById(R.id.nav_view);
    navigationView.setNavigationItemSelectedListener(this);
    navigationView.setItemIconTintList(null);
    View header = navigationView.getHeaderView(0);
    userEmailTextView = header.findViewById(R.id.userEmail);
    displayNameTextView = header.findViewById(R.id.displayName);
    Menu menu = navigationView.getMenu();
    operationMenuItem = menu.findItem(R.id.nav_fido2Operations);
    signInMenuItem = menu.findItem(R.id.nav_signin);
    signOutMenuItem = menu.findItem(R.id.nav_signout);
    signInButton = findViewById(R.id.sign_in_button);
    signInButton.setSize(SignInButton.SIZE_WIDE);
    signInButton.setScopes(gso.getScopeArray());
    signInButton.setOnClickListener(this);
    // END prepare drawer layout

    // request SignIn or load registered tokens
    updateUI();
  }

  /** Show SignIn button to request user sign in or display all registered security tokens */
  private void updateUI() {
    // We check a boolean value in SharedPreferences to determine whether the user has been
    // signed in. This value is false by default. It would be set to true after signing in and
    // would be reset to false after user clicks "Sign out".
    // After the users clicks "Sign out", we couldn't use
    // GoogleSignInApi#silentSignIn(GoogleApiClient), because it silently signs in the user
    // again. Thus, we rely on this boolean value in SharedPreferences.
    if (!getAccountSignInStatus()) {
      displayAccountNotSignedIn();
      return;
    }
    OptionalPendingResult<GoogleSignInResult> pendingResult =
        Auth.GoogleSignInApi.silentSignIn(googleApiClient);
    if (pendingResult.isDone()) {
      // If the user's cached credentials are valid, the OptionalPendingResult will be "done"
      // and the GoogleSignInResult will be available instantly.
      GoogleSignInResult result = pendingResult.get();
      if (result.isSuccess()) {
        googleSignInAccount = result.getSignInAccount();
        displayAccountSignedIn(
            result.getSignInAccount().getEmail(), result.getSignInAccount().getDisplayName());
      } else {
        displayAccountNotSignedIn();
      }
    } else {
      // If the user has not previously signed in on this device or the sign-in has expired,
      // this asynchronous branch will attempt to sign in the user silently. Cross-device
      // single sign-on will occur in this branch.
      displayAccountNotSignedIn();
      pendingResult.setResultCallback(
          new ResultCallback<GoogleSignInResult>() {
            @Override
            public void onResult(@NonNull GoogleSignInResult result) {
              if (result.isSuccess()) {
                googleSignInAccount = result.getSignInAccount();
                displayAccountSignedIn(
                    result.getSignInAccount().getEmail(),
                    result.getSignInAccount().getDisplayName());
              } else {
                displayAccountNotSignedIn();
              }
            }
          });
    }
  }

  /** Switches the UI to the signed-in state and kicks off a refresh of the registered keys. */
  private void displayAccountSignedIn(String email, String displayName) {
    swipeRefreshLayout.setVisibility(View.VISIBLE);
    userEmailTextView.setText(email);
    displayNameTextView.setText(displayName);
    operationMenuItem.setVisible(true);
    signInMenuItem.setVisible(false);
    signOutMenuItem.setVisible(true);
    updateAndDisplayRegisteredKeys();
    signInButton.setVisibility(View.GONE);
  }

  /** Switches the UI to the signed-out state: only the sign-in button is shown. */
  private void displayAccountNotSignedIn() {
    signInButton.setVisibility(View.VISIBLE);
    userEmailTextView.setText("");
    displayNameTextView.setText("");
    operationMenuItem.setVisible(false);
    signInMenuItem.setVisible(true);
    signOutMenuItem.setVisible(false);
    swipeRefreshLayout.setVisibility(View.GONE);
    progressBar.setVisibility(View.GONE);
  }

  /** Re-binds the adapter with the current {@link #securityTokens} and hides progress UI. */
  private void displayRegisteredKeys() {
    adapter.clearSecurityTokens();
    adapter.addSecurityToken(securityTokens);
    recyclerView.setAdapter(adapter);
    swipeRefreshLayout.setRefreshing(false);
    progressBar.setVisibility(View.GONE);
  }

  /**
   * Fetches a registration request (PublicKeyCredentialCreationOptions) from the server and
   * forwards it to the FIDO2 client. Requests GET_ACCOUNTS permission first if not yet granted;
   * the flow resumes in onRequestPermissionsResult.
   */
  private void getRegisterRequest() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.GET_ACCOUNTS)
        == PackageManager.PERMISSION_GRANTED) {
      Log.i(TAG, "getRegisterRequest permission is granted");
      Task<PublicKeyCredentialCreationOptions> getRegisterRequestTask = asyncGetRegisterRequest();
      getRegisterRequestTask.addOnCompleteListener(
          new OnCompleteListener<PublicKeyCredentialCreationOptions>() {
            @Override
            public void onComplete(@NonNull Task<PublicKeyCredentialCreationOptions> task) {
              PublicKeyCredentialCreationOptions options = task.getResult();
              if (options == null) {
                Log.d(TAG, "Register request is null");
                return;
              }
              sendRegisterRequestToClient(options);
            }
          });
    } else {
      Log.i(TAG, "getRegisterRequest permission is requested");
      ActivityCompat.requestPermissions(
          this,
          new String[] {Manifest.permission.GET_ACCOUNTS},
          GET_ACCOUNTS_PERMISSIONS_REQUEST_REGISTER);
    }
  }

  /**
   * Hands the creation options to the Play Services FIDO2 API and launches its PendingIntent;
   * the result arrives in onActivityResult with REQUEST_CODE_REGISTER.
   */
  private void sendRegisterRequestToClient(PublicKeyCredentialCreationOptions options) {
    Fido2ApiClient fido2ApiClient = Fido.getFido2ApiClient(this.getApplicationContext());
    Task<Fido2PendingIntent> result = fido2ApiClient.getRegisterIntent(options);
    result.addOnSuccessListener(
        new OnSuccessListener<Fido2PendingIntent>() {
          @Override
          public void onSuccess(Fido2PendingIntent fido2PendingIntent) {
            if (fido2PendingIntent.hasPendingIntent()) {
              try {
                fido2PendingIntent.launchPendingIntent(
                    Fido2DemoActivity.this, REQUEST_CODE_REGISTER);
                Log.i(TAG, "Register request is sent out");
              } catch (IntentSender.SendIntentException e) {
                Log.e(TAG, "Error launching pending intent for register request", e);
              }
            }
          }
        });
  }

  /**
   * Uploads the authenticator's attestation response to the server; on success refreshes the
   * key list, on failure (null token) shows a toast.
   */
  private void updateRegisterResponseToServer(AuthenticatorAttestationResponse response) {
    Task<String> updateRegisterResponseToServerTask =
        asyncUpdateRegisterResponseToServer(response);
    updateRegisterResponseToServerTask.addOnCompleteListener(
        new OnCompleteListener<String>() {
          @Override
          public void onComplete(@NonNull Task<String> task) {
            String securityKeyToken = task.getResult();
            if (securityKeyToken == null) {
              Toast.makeText(
                      Fido2DemoActivity.this, "security key registration failed",
                      Toast.LENGTH_SHORT)
                  .show();
              return;
            }
            updateAndDisplayRegisteredKeys();
            Log.i(
                TAG,
                "Update register response to server with securityKeyToken: " + securityKeyToken);
          }
        });
  }

  /**
   * Fetches a sign (assertion) request from the server and forwards it to the FIDO2 client.
   * Mirrors getRegisterRequest, including the GET_ACCOUNTS permission dance.
   */
  private void getSignRequest() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.GET_ACCOUNTS)
        == PackageManager.PERMISSION_GRANTED) {
      Log.i(TAG, "getSignRequest permission is granted");
      Task<PublicKeyCredentialRequestOptions> getSignRequestTask = asyncGetSignRequest();
      getSignRequestTask.addOnCompleteListener(
          new OnCompleteListener<PublicKeyCredentialRequestOptions>() {
            @Override
            public void onComplete(@NonNull Task<PublicKeyCredentialRequestOptions> task) {
              PublicKeyCredentialRequestOptions options = task.getResult();
              if (options == null) {
                Log.i(TAG, "Sign request is null");
                return;
              }
              sendSignRequestToClient(options);
            }
          });
    } else {
      Log.i(TAG, "getSignRequest permission is requested");
      ActivityCompat.requestPermissions(
          this,
          new String[] {Manifest.permission.GET_ACCOUNTS},
          GET_ACCOUNTS_PERMISSIONS_REQUEST_SIGN);
    }
  }

  /**
   * Hands the request options to the FIDO2 API and launches its PendingIntent; the result
   * arrives in onActivityResult with REQUEST_CODE_SIGN.
   */
  private void sendSignRequestToClient(PublicKeyCredentialRequestOptions options) {
    Fido2ApiClient fido2ApiClient = Fido.getFido2ApiClient(this.getApplicationContext());
    Task<Fido2PendingIntent> result = fido2ApiClient.getSignIntent(options);
    result.addOnSuccessListener(
        new OnSuccessListener<Fido2PendingIntent>() {
          @Override
          public void onSuccess(Fido2PendingIntent fido2PendingIntent) {
            if (fido2PendingIntent.hasPendingIntent()) {
              try {
                fido2PendingIntent.launchPendingIntent(Fido2DemoActivity.this, REQUEST_CODE_SIGN);
              } catch (IntentSender.SendIntentException e) {
                Log.e(TAG, "Error launching pending intent for sign request", e);
              }
            }
          }
        });
  }

  /**
   * Uploads the authenticator's assertion response to the server and, if the server recognizes
   * the key, highlights the matching token row.
   */
  private void updateSignResponseToServer(AuthenticatorAssertionResponse response) {
    Task<String> updateSignResponseToServerTask = asyncUpdateSignResponseToServer(response);
    updateSignResponseToServerTask.addOnCompleteListener(
        new OnCompleteListener<String>() {
          @Override
          public void onComplete(@NonNull Task<String> task) {
            String signResult = task.getResult();
            if (signResult == null) {
              Toast.makeText(
                      Fido2DemoActivity.this,
                      "this security key has not been registered!",
                      Toast.LENGTH_SHORT)
                  .show();
              return;
            }
            Log.i(TAG, "authenticated key's pub key is " + signResult);
            highlightAuthenticatedToken(signResult);
          }
        });
  }

  /**
   * Refreshes {@link #securityTokens} from the server and re-renders the list. Requests
   * GET_ACCOUNTS permission if needed; resumes via onRequestPermissionsResult.
   */
  private void updateAndDisplayRegisteredKeys() {
    progressBar.setVisibility(View.VISIBLE);
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.GET_ACCOUNTS)
        == PackageManager.PERMISSION_GRANTED) {
      Log.i(TAG, "updateAndDisplayRegisteredKeys permission is granted");
      Task<List<Map<String, String>>> refreshSecurityKeyTask = asyncRefreshSecurityKey();
      refreshSecurityKeyTask.addOnCompleteListener(
          new OnCompleteListener<List<Map<String, String>>>() {
            @Override
            public void onComplete(@NonNull Task<List<Map<String, String>>> task) {
              List<Map<String, String>> tokens = task.getResult();
              if (tokens == null) {
                // Refresh failed; just stop the progress indicators.
                swipeRefreshLayout.setRefreshing(false);
                progressBar.setVisibility(View.GONE);
                return;
              }
              securityTokens = tokens;
              adapter.clearSecurityTokens();
              adapter.addSecurityToken(securityTokens);
              displayRegisteredKeys();
            }
          });
    } else {
      Log.i(TAG, "updateAndDisplayRegisteredKeys permission is requested");
      ActivityCompat.requestPermissions(
          this,
          new String[] {Manifest.permission.GET_ACCOUNTS},
          GET_ACCOUNTS_PERMISSIONS_ALL_TOKENS);
    }
  }

  /** Removes the token at the given list position on the server, then refreshes the list. */
  public void removeTokenByIndexInList(int whichToken) {
    /* assume this operation can only happen within short time after
    updateAndDisplayRegisteredKeys, which has already checked permission */
    Task<String> removeSecurityKeyTask = asyncRemoveSecurityKey(whichToken);
    removeSecurityKeyTask.addOnCompleteListener(
        new OnCompleteListener<String>() {
          @Override
          public void onComplete(@NonNull Task<String> task) {
            updateAndDisplayRegisteredKeys();
          }
        });
  }

  /**
   * Background task: asks the server for registration options, excluding the key handles of the
   * tokens currently checked in the adapter.
   */
  private Task<PublicKeyCredentialCreationOptions> asyncGetRegisterRequest() {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<PublicKeyCredentialCreationOptions>() {
          @Override
          public PublicKeyCredentialCreationOptions call() throws Exception {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.getRegistrationRequest(
                FluentIterable.from(adapter.getCheckedItems())
                    .transform(i -> i.get(KEY_KEY_HANDLE))
                    .filter(i -> !Strings.isNullOrEmpty(i))
                    .toList());
          }
        });
  }

  /** Background task: posts the attestation response to the server. */
  private Task<String> asyncUpdateRegisterResponseToServer(
      final AuthenticatorAttestationResponse response) {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.getRegisterResponseFromServer(response);
          }
        });
  }

  /**
   * Background task: asks the server for sign options limited to the key handles of the tokens
   * currently checked in the adapter.
   */
  private Task<PublicKeyCredentialRequestOptions> asyncGetSignRequest() {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<PublicKeyCredentialRequestOptions>() {
          @Override
          public PublicKeyCredentialRequestOptions call() {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.getSignRequest(
                FluentIterable.from(adapter.getCheckedItems())
                    .transform(i -> i.get(KEY_KEY_HANDLE))
                    .filter(i -> !Strings.isNullOrEmpty(i))
                    .toList());
          }
        });
  }

  /** Background task: posts the assertion response to the server. */
  private Task<String> asyncUpdateSignResponseToServer(
      final AuthenticatorAssertionResponse response) {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.getSignResponseFromServer(response);
          }
        });
  }

  /**
   * Parses the credential id out of the server's JSON sign result, finds the matching token row
   * by key handle, and flashes its card via a pressed-state toggle.
   */
  private void highlightAuthenticatedToken(String signResult) {
    String credentialId;
    try {
      JSONObject signResultJson = new JSONObject(signResult);
      JSONObject credentialJson = signResultJson.getJSONObject(KEY_CREDENTIAL);
      credentialId = credentialJson.getString(KEY_CREDENTIAL_ID);
    } catch (JSONException e) {
      Log.e(TAG, "Error extracting information from JSON sign result", e);
      return;
    }
    int whichToken = -1;
    Log.i(TAG, "Successfully authenticated credential Id: " + credentialId);
    for (int position = 0; position < securityTokens.size(); position++) {
      Map<String, String> tokenMap = securityTokens.get(position);
      Log.d(TAG, "token map at position " + position + " is " + tokenMap.toString());
      Log.i(
          TAG,
          "highlightAuthenticatedToken registered public_key: " + tokenMap.get(KEY_KEY_HANDLE));
      if (credentialId.equals(tokenMap.get(KEY_KEY_HANDLE))) {
        whichToken = position;
        break;
      }
    }
    if (whichToken >= 0) {
      Log.i(TAG, "highlightAuthenticatedToken whichToken: " + whichToken);
      // NOTE(review): findViewByPosition returns null if the row is scrolled off-screen —
      // this would NPE; confirm the row is always visible when this runs.
      View card =
          recyclerView
              .getLayoutManager()
              .findViewByPosition(whichToken)
              .findViewById(R.id.information);
      // Toggle the pressed state so the card's pressed drawable flashes briefly.
      card.setPressed(true);
      card.setPressed(false);
    }
  }

  /** Background task: fetches all registered security tokens from the server. */
  private Task<List<Map<String, String>>> asyncRefreshSecurityKey() {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<List<Map<String, String>>>() {
          @Override
          public List<Map<String, String>> call() {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.getAllSecurityTokens();
          }
        });
  }

  /** Background task: deletes the token at the given position (by its credential id). */
  private Task<String> asyncRemoveSecurityKey(final int tokenPositionInList) {
    return Tasks.call(
        THREAD_POOL_EXECUTOR,
        new Callable<String>() {
          @Override
          public String call() throws Exception {
            gaeService = GAEService.getInstance(Fido2DemoActivity.this, googleSignInAccount);
            return gaeService.removeSecurityKey(
                securityTokens.get(tokenPositionInList).get(KEY_CREDENTIAL_ID));
          }
        });
  }

  /** Launches the interactive Google Sign-In flow; result handled in onActivityResult. */
  private void signIn() {
    Intent signInIntent = Auth.GoogleSignInApi.getSignInIntent(googleApiClient);
    startActivityForResult(signInIntent, RC_SIGN_IN);
  }

  /** Signs out, clears the persisted sign-in flag, and resets the UI and cached service. */
  private void signOut() {
    Auth.GoogleSignInApi.signOut(googleApiClient)
        .setResultCallback(
            new ResultCallback<Status>() {
              @Override
              public void onResult(@NonNull Status status) {
                clearAccountSignInStatus();
                updateUI();
                gaeService = null;
              }
            });
  }

  /**
   * Dispatches results from the Sign-In flow (RC_SIGN_IN) and from the FIDO2 PendingIntents
   * (register/sign). FIDO2 errors come back in FIDO2_KEY_ERROR_EXTRA even with RESULT_OK.
   */
  @Override
  public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (RC_SIGN_IN == requestCode) {
      // NOTE(review): local name "siginInResult" is a typo for "signInResult".
      GoogleSignInResult siginInResult = Auth.GoogleSignInApi.getSignInResultFromIntent(data);
      handleSignInResult(siginInResult);
      return;
    }
    switch (resultCode) {
      case RESULT_OK:
        if (data.hasExtra(Fido.FIDO2_KEY_ERROR_EXTRA)) {
          Log.d(TAG, "Received error response from Google Play Services FIDO2 API");
          AuthenticatorErrorResponse response =
              AuthenticatorErrorResponse.deserializeFromBytes(
                  data.getByteArrayExtra(Fido.FIDO2_KEY_ERROR_EXTRA));
          Toast.makeText(
                  Fido2DemoActivity.this, "Operation failed\n" + response, Toast.LENGTH_SHORT)
              .show();
        } else if (requestCode == REQUEST_CODE_REGISTER) {
          Log.d(TAG, "Received register response from Google Play Services FIDO2 API");
          AuthenticatorAttestationResponse response =
              AuthenticatorAttestationResponse.deserializeFromBytes(
                  data.getByteArrayExtra(Fido.FIDO2_KEY_RESPONSE_EXTRA));
          Toast.makeText(
                  Fido2DemoActivity.this,
                  "Registration key handle:\n" + BaseEncoding.base64().encode(response.getKeyHandle()),
                  Toast.LENGTH_SHORT)
              .show();
          updateRegisterResponseToServer(response);
        } else if (requestCode == REQUEST_CODE_SIGN) {
          Log.d(TAG, "Received sign response from Google Play Services FIDO2 API");
          AuthenticatorAssertionResponse response =
              AuthenticatorAssertionResponse.deserializeFromBytes(
                  data.getByteArrayExtra(Fido.FIDO2_KEY_RESPONSE_EXTRA));
          Toast.makeText(
                  Fido2DemoActivity.this,
                  "Sign key handle:\n" + BaseEncoding.base64().encode(response.getKeyHandle()),
                  Toast.LENGTH_SHORT)
              .show();
          updateSignResponseToServer(response);
        }
        break;
      case RESULT_CANCELED:
        Toast.makeText(Fido2DemoActivity.this, "Operation is cancelled", Toast.LENGTH_SHORT).show();
        break;
      default:
        Toast.makeText(
                Fido2DemoActivity.this,
                "Operation failed, with resultCode " + resultCode,
                Toast.LENGTH_SHORT)
            .show();
        break;
    }
  }

  /**
   * Persists the sign-in flag and logs account details on success; clears the flag on failure.
   * Either way, re-renders the UI.
   */
  private void handleSignInResult(GoogleSignInResult result) {
    Log.d(TAG, "handleSignInResult:" + result.isSuccess());
    Log.d(TAG, "sign in result: " + result.getStatus().toString());
    if (result.isSuccess()) {
      GoogleSignInAccount acct = result.getSignInAccount();
      saveAccountSignInStatus();
      Log.d(TAG, "account email" + acct.getEmail());
      Log.d(TAG, "account displayName" + acct.getDisplayName());
      Log.d(TAG, "account id" + acct.getId());
      Log.d(TAG, "account idToken" + acct.getIdToken());
      Log.d(TAG, "account scopes" + acct.getGrantedScopes());
    } else {
      clearAccountSignInStatus();
    }
    updateUI();
  }

  /**
   * Resumes whichever operation triggered the GET_ACCOUNTS permission request. Only the
   * register path checks grantResults before retrying; the sign/list paths retry
   * unconditionally and fall back to the permission branch again if still denied.
   */
  @Override
  public void onRequestPermissionsResult(
      int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    switch (requestCode) {
      case GET_ACCOUNTS_PERMISSIONS_REQUEST_REGISTER:
        Log.d(TAG, "onRequestPermissionsResult");
        // If request is cancelled, the result arrays are empty.
        if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
          getRegisterRequest();
        }
        return;
      case GET_ACCOUNTS_PERMISSIONS_REQUEST_SIGN:
        getSignRequest();
        return;
      case GET_ACCOUNTS_PERMISSIONS_ALL_TOKENS:
        updateAndDisplayRegisteredKeys();
        return;
      default:
        // TODO: better error handling
        return;
    }
  }

  /** Click handler for the sign-in button (the only view registered with this listener). */
  @Override
  public void onClick(View v) {
    switch (v.getId()) {
      case R.id.sign_in_button:
        signIn();
        break;
      default:
        // TODO: better error handling
        break;
    }
  }

  /** Routes navigation-drawer selections to the matching operation, then closes the drawer. */
  @Override
  public boolean onNavigationItemSelected(@NonNull MenuItem item) {
    // Handle navigation view item clicks here.
    switch (item.getItemId()) {
      case R.id.nav_signin:
        signIn();
        break;
      case R.id.nav_signout:
        signOut();
        break;
      case R.id.nav_register:
        getRegisterRequest();
        break;
      case R.id.nav_auth:
        getSignRequest();
        break;
      case R.id.nav_github:
        Intent browser =
            new Intent(Intent.ACTION_VIEW, Uri.parse(getString(R.string.github_location)));
        this.startActivity(browser);
        break;
      default:
        // TODO: better error handling
        break;
    }
    DrawerLayout drawer = findViewById(R.id.drawer_layout);
    drawer.closeDrawer(GravityCompat.START);
    return true;
  }

  /** GoogleApiClient connection failures are only logged; auto-manage shows its own UI. */
  @Override
  public void onConnectionFailed(@NonNull ConnectionResult connectionResult) {
    Log.d(TAG, "onConnectionFailed:" + connectionResult);
  }

  @Override
  protected void onStart() {
    super.onStart();
    googleApiClient.connect();
  }

  @Override
  protected void onStop() {
    super.onStop();
    googleApiClient.disconnect();
  }

  /** Back closes the drawer first if it is open; otherwise defers to the default behavior. */
  @Override
  public void onBackPressed() {
    DrawerLayout drawer = findViewById(R.id.drawer_layout);
    if (drawer.isDrawerOpen(GravityCompat.START)) {
      drawer.closeDrawer(GravityCompat.START);
    } else {
      super.onBackPressed();
    }
  }

  /** Marks the user as signed in (see the comment in updateUI for why this flag exists). */
  private void saveAccountSignInStatus() {
    SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this);
    SharedPreferences.Editor editor = settings.edit();
    editor.putBoolean(Constants.PREF_SIGNED_IN_STATUS, true);
    Log.d(TAG, "Save account sign in status: true");
    editor.apply();
  }

  /** Marks the user as signed out. */
  private void clearAccountSignInStatus() {
    SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this);
    SharedPreferences.Editor editor = settings.edit();
    editor.putBoolean(Constants.PREF_SIGNED_IN_STATUS, false);
    Log.d(TAG, "Clear account sign in status");
    editor.apply();
  }

  /** Returns the persisted sign-in flag; defaults to false. */
  private boolean getAccountSignInStatus() {
    SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this);
    return settings.getBoolean(Constants.PREF_SIGNED_IN_STATUS, false);
  }

  /** Installs MultiDex so the app runs on pre-Lollipop devices despite exceeding 64K methods. */
  @Override
  protected void attachBaseContext(Context base) {
    super.attachBaseContext(base);
    MultiDex.install(this);
  }
}
googleapis/google-cloud-java
35,367
java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateBackupRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/netapp/v1/backup.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.netapp.v1; /** * * * <pre> * UpdateBackupRequest updates description and/or labels for a backup. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.UpdateBackupRequest} */ public final class UpdateBackupRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateBackupRequest) UpdateBackupRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateBackupRequest.newBuilder() to construct. 
private UpdateBackupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateBackupRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateBackupRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupProto .internal_static_google_cloud_netapp_v1_UpdateBackupRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupProto .internal_static_google_cloud_netapp_v1_UpdateBackupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.UpdateBackupRequest.class, com.google.cloud.netapp.v1.UpdateBackupRequest.Builder.class); } private int bitField0_; public static final int UPDATE_MASK_FIELD_NUMBER = 1; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int BACKUP_FIELD_NUMBER = 2; private com.google.cloud.netapp.v1.Backup backup_; /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backup field is set. */ @java.lang.Override public boolean hasBackup() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backup. */ @java.lang.Override public com.google.cloud.netapp.v1.Backup getBackup() { return backup_ == null ? com.google.cloud.netapp.v1.Backup.getDefaultInstance() : backup_; } /** * * * <pre> * Required. 
The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.netapp.v1.BackupOrBuilder getBackupOrBuilder() { return backup_ == null ? com.google.cloud.netapp.v1.Backup.getDefaultInstance() : backup_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getBackup()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getBackup()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.netapp.v1.UpdateBackupRequest)) { return super.equals(obj); } com.google.cloud.netapp.v1.UpdateBackupRequest other = (com.google.cloud.netapp.v1.UpdateBackupRequest) obj; if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (hasBackup() != other.hasBackup()) return false; if (hasBackup()) { if (!getBackup().equals(other.getBackup())) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } if (hasBackup()) { hash = (37 * hash) + BACKUP_FIELD_NUMBER; hash = (53 * hash) + getBackup().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.netapp.v1.UpdateBackupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateBackupRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * UpdateBackupRequest updates description and/or labels for a backup. * </pre> * * Protobuf type {@code google.cloud.netapp.v1.UpdateBackupRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateBackupRequest) com.google.cloud.netapp.v1.UpdateBackupRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.netapp.v1.BackupProto .internal_static_google_cloud_netapp_v1_UpdateBackupRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.netapp.v1.BackupProto .internal_static_google_cloud_netapp_v1_UpdateBackupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.netapp.v1.UpdateBackupRequest.class, com.google.cloud.netapp.v1.UpdateBackupRequest.Builder.class); } // Construct using com.google.cloud.netapp.v1.UpdateBackupRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUpdateMaskFieldBuilder(); getBackupFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } backup_ = null; if (backupBuilder_ != null) { backupBuilder_.dispose(); backupBuilder_ = null; } return this; } 
@java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.netapp.v1.BackupProto .internal_static_google_cloud_netapp_v1_UpdateBackupRequest_descriptor; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupRequest getDefaultInstanceForType() { return com.google.cloud.netapp.v1.UpdateBackupRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupRequest build() { com.google.cloud.netapp.v1.UpdateBackupRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupRequest buildPartial() { com.google.cloud.netapp.v1.UpdateBackupRequest result = new com.google.cloud.netapp.v1.UpdateBackupRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.netapp.v1.UpdateBackupRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.backup_ = backupBuilder_ == null ? 
backup_ : backupBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.netapp.v1.UpdateBackupRequest) { return mergeFrom((com.google.cloud.netapp.v1.UpdateBackupRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateBackupRequest other) { if (other == com.google.cloud.netapp.v1.UpdateBackupRequest.getDefaultInstance()) return this; if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.hasBackup()) { mergeBackup(other.getBackup()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean 
done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getBackupFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000001); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000001; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Field mask is used to specify the fields to be overwritten in the * Backup resource to be updated. * The fields specified in the update_mask are relative to the resource, not * the full request. A field will be overwritten if it is in the mask. If the * user does not provide a mask then all fields will be overwritten. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private com.google.cloud.netapp.v1.Backup backup_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.Backup, com.google.cloud.netapp.v1.Backup.Builder, com.google.cloud.netapp.v1.BackupOrBuilder> backupBuilder_; /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the backup field is set. */ public boolean hasBackup() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The backup. */ public com.google.cloud.netapp.v1.Backup getBackup() { if (backupBuilder_ == null) { return backup_ == null ? com.google.cloud.netapp.v1.Backup.getDefaultInstance() : backup_; } else { return backupBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackup(com.google.cloud.netapp.v1.Backup value) { if (backupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } backup_ = value; } else { backupBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setBackup(com.google.cloud.netapp.v1.Backup.Builder builderForValue) { if (backupBuilder_ == null) { backup_ = builderForValue.build(); } else { backupBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeBackup(com.google.cloud.netapp.v1.Backup value) { if (backupBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && backup_ != null && backup_ != com.google.cloud.netapp.v1.Backup.getDefaultInstance()) { getBackupBuilder().mergeFrom(value); } else { backup_ = value; } } else { backupBuilder_.mergeFrom(value); } if (backup_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearBackup() { bitField0_ = (bitField0_ & ~0x00000002); backup_ = null; if (backupBuilder_ != null) { backupBuilder_.dispose(); backupBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. 
The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.Backup.Builder getBackupBuilder() { bitField0_ |= 0x00000002; onChanged(); return getBackupFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.netapp.v1.BackupOrBuilder getBackupOrBuilder() { if (backupBuilder_ != null) { return backupBuilder_.getMessageOrBuilder(); } else { return backup_ == null ? com.google.cloud.netapp.v1.Backup.getDefaultInstance() : backup_; } } /** * * * <pre> * Required. The backup being updated * </pre> * * <code>.google.cloud.netapp.v1.Backup backup = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.Backup, com.google.cloud.netapp.v1.Backup.Builder, com.google.cloud.netapp.v1.BackupOrBuilder> getBackupFieldBuilder() { if (backupBuilder_ == null) { backupBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.netapp.v1.Backup, com.google.cloud.netapp.v1.Backup.Builder, com.google.cloud.netapp.v1.BackupOrBuilder>( getBackup(), getParentForChildren(), isClean()); backup_ = null; } return backupBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateBackupRequest) } // @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateBackupRequest) private static final com.google.cloud.netapp.v1.UpdateBackupRequest DEFAULT_INSTANCE; static { 
DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateBackupRequest(); } public static com.google.cloud.netapp.v1.UpdateBackupRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateBackupRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateBackupRequest>() { @java.lang.Override public UpdateBackupRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateBackupRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateBackupRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.netapp.v1.UpdateBackupRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/rappor
35,280
client/javatest/com/google/rappor/EncoderTest.java
package com.google.rappor; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.BlockJUnit4ClassRunner; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.SecureRandom; /** * Unit tests for {@link Encoder}. */ @RunWith(BlockJUnit4ClassRunner.class) public class EncoderTest { @Rule public final ExpectedException thrown = ExpectedException.none(); /** * Convert a human readable string to a 32 byte userSecret for testing. * * <p>Do not use this in a production environment! For security, userSecret * must be at least 32 bytes of high-quality entropy. */ private static byte[] makeTestingUserSecret(String testingSecret) throws Exception { // We generate the fake user secret by concatenating three copies of the // 16 byte MD5 hash of the testingSecret string encoded in UTF 8. MessageDigest md5 = MessageDigest.getInstance("MD5"); byte[] digest = md5.digest(testingSecret.getBytes(StandardCharsets.UTF_8)); assertEquals(16, digest.length); return ByteBuffer.allocate(48).put(digest).put(digest).put(digest).array(); } private static long toLong(byte[] bytes) { assertThat(bytes.length, is(lessThanOrEqualTo(8))); ByteBuffer buffer = ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).put(bytes); buffer.rewind(); // can't chain rewind() because it returns Buffer, not ByteBuffer. 
return buffer.getLong(); } private static byte[] toBytes(long value) { return ByteBuffer.allocate(8).order(ByteOrder.LITTLE_ENDIAN).putLong(value).array(); } @Test public void testEncoderConstruction_goodArguments() throws Exception { // Full RAPPOR new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes // IRR-only (no PRR) new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes // PRR-only (no IRR) new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.0, // probabilityP 1.0, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_userSecretTooShort() throws Exception { thrown.expect(IllegalArgumentException.class); byte[] tooShortSecret = new byte[47]; new Encoder(tooShortSecret, // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_userSecretMayBeLong() throws Exception { byte[] tooLongSecret = new byte[49]; new Encoder(tooLongSecret, // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numBitsTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 0, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numBitsTooHigh() throws Exception { 
thrown.expect(IllegalArgumentException.class); new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 4097, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityFTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, -0.01, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityFTooHigh() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 1.01, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityPTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF -0.01, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityPTooHigh() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 1.01, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityQTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP -0.01, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_probabilityQTooHigh() throws 
Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.75, // probabilityP 1.01, // probabilityQ 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numCohortsTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 0, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numCohortsTooHigh() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ Encoder.MAX_COHORTS + 1, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numCohortsNotPowerOf2() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 3, // numCohorts 2); // numBloomHashes } @Test public void testEncoderConstruction_numBloomHashesTooLow() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 0); // numBloomHashes } @Test public void testEncoderConstruction_numBloomHashesTooHigh() throws Exception { thrown.expect(IllegalArgumentException.class); new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 9); // numBloomHashes } @Test public void 
testEncoderGetCohort() throws Exception { // This is a stable, random cohort assignment. assertEquals( 3, new Encoder(makeTestingUserSecret("Blotto"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); // With numCohorts == 1, the only possible cohort assigment is 0. assertEquals( 0, new Encoder(makeTestingUserSecret("Blotto"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2) // numBloomHashes .getCohort()); // Changing the user secret changes the cohort. assertEquals( 3, new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); assertEquals( 0, new Encoder( makeTestingUserSecret("Bar2"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); // Changing the encoder id does not changes the cohort. 
assertEquals( 3, new Encoder(makeTestingUserSecret("Blotto"), // userSecret "Foo1", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); assertEquals( 3, new Encoder(makeTestingUserSecret("Blotto"), // userSecret "Foo2", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); assertEquals( 3, new Encoder(makeTestingUserSecret("Blotto"), // userSecret "Foo3", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 4, // numCohorts 2) // numBloomHashes .getCohort()); // Cohort assignments are bit-wise subsets int cohortAssignmentBig = new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ Encoder.MAX_COHORTS, // numCohorts 2) // numBloomHashes .getCohort(); int numCohortsSmall = Encoder.MAX_COHORTS / 2; // Verify that numCohortsSmall is a power of 2. assertEquals(0, numCohortsSmall & (numCohortsSmall - 1)); int cohortAssignmentSmall = new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ numCohortsSmall, // numCohorts 2) // numBloomHashes .getCohort(); // This validates that the test case is well chosen. If it fails, select a different userSecret // or encoderId. assertNotEquals(cohortAssignmentBig, cohortAssignmentSmall); // Test that cohortAssignmentSmall is a suffix of cohortAssignmentBig when represented in // binary. 
assertEquals(cohortAssignmentBig & (numCohortsSmall - 1), cohortAssignmentSmall); } @Test public void testEncoderEncodeBits_identity() throws Exception { assertEquals( 0b11111101L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(0b11111101L)))); assertEquals( 0xD56B8119L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 32, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(0xD56B8119L)))); } @Test public void testEncoderEncodeBits_tooHigh() throws Exception { Encoder encoder = new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2); // numBloomHashes thrown.expect(IllegalArgumentException.class); encoder.encodeBits(toBytes(0x100)); // 9 bits } @Test public void testEncoderEncodeBoolean_identity() throws Exception { assertEquals( 0x1L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 1, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1) // numBloomHashes .encodeBoolean(true))); assertEquals( 0x0L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 1, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1) // numBloomHashes .encodeBoolean(false))); } @Test public void testEncoderEncodeOrdinal_identity() throws Exception { assertEquals( 0b000000000001L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1) // numBloomHashes .encodeOrdinal(0))); assertEquals( 0b100000000000L, toLong( new Encoder( 
makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1) // numBloomHashes .encodeOrdinal(11))); } @Test public void testEncoderEncodeOrdinal_tooLow() throws Exception { Encoder encoder = new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1); // numBloomHashes thrown.expect(IllegalArgumentException.class); encoder.encodeOrdinal(-1); } @Test public void testEncoderEncodeOrdinal_tooHigh() throws Exception { Encoder encoder = new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 1); // numBloomHashes thrown.expect(IllegalArgumentException.class); encoder.encodeOrdinal(12); } @Test public void testEncoderEncodeString_identity() throws Exception { assertEquals( 0b000010000100L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts (so must be cohort 0) 2) // numBloomHashes .encodeString("Whizbang"))); // Changing the user but keeping the cohort the same (both cohort 0) // results in the same encoding. assertEquals( 0b000010000100L, toLong( new Encoder( makeTestingUserSecret("Blotto"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts (so must be cohort 0) 2) // numBloomHashes .encodeString("Whizbang"))); // When the user is in a different cohort, she gets a different encoding. 
Encoder cohortProbeEncoder = new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 4, // numCohorts 2); // numBloomHashes assertEquals(3, cohortProbeEncoder.getCohort()); assertEquals(0b000011000000L, toLong(cohortProbeEncoder.encodeString("Whizbang"))); // Changing the string gets a different encoding. assertEquals( 0b001001000000L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts (so must be cohort 0) 2) // numBloomHashes .encodeString("Xyzzy"))); assertEquals( 0b000000110000L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 12, // numBits, 0, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts (so must be cohort 0) 2) // numBloomHashes .encodeString("Thud"))); } @Test public void testEncoderEncodeBits_prrMemoizes() throws Exception { assertEquals( 0b01110101L, toLong( new Encoder( makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 0.25, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(0b11111101L)))); assertEquals( 0b11111101L, toLong( new Encoder( makeTestingUserSecret("Baz"), // userSecret "Foo", // encoderId 8, // numBits, 0.25, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(0b11111101L)))); } @Test public void testEncoderEncodeBits_prrFlipProbability() throws Exception { int numSamples = 10000; int numBits = 8; double probabilityF = 1.0 / 32.0; long inputValue = 0b11111101L; int counts[] = new int[64]; for (int iSample = 0; iSample < numSamples; iSample++) { Encoder encoder = new Encoder(makeTestingUserSecret("User" + iSample), // userSecret "Foo", // encoderId numBits, // numBits, probabilityF, // probabilityF 0, // probabilityP 1, 
// probabilityQ 1, // numCohorts 2); // numBloomHashes long encoded = toLong(encoder.encodeBits(toBytes(inputValue))); assertEquals(encoded, toLong(encoder.encodeBits(toBytes(inputValue)))); for (int iBit = 0; iBit < numBits; iBit++) { if ((encoded & (1L << iBit)) != 0) { counts[iBit]++; } } } assertEquals(9843, counts[0]); // input = 1, expectation = 9843.75 assertEquals(173, counts[1]); // input = 0, expectation = 156.25 assertEquals(9839, counts[2]); // input = 1, expectation = 9843.75 assertEquals(9831, counts[3]); // input = 1, expectation = 9843.75 assertEquals(9848, counts[4]); // input = 1, expectation = 9843.75 assertEquals(9828, counts[5]); // input = 1, expectation = 9843.75 assertEquals(9834, counts[6]); // input = 1, expectation = 9843.75 assertEquals(9837, counts[7]); // input = 1, expectation = 9843.75 // Check that no high-order bit past numBits ever got set. for (int iBit = numBits; iBit < 64; iBit++) { assertEquals(0, counts[iBit]); } } @Test public void testEncoderEncodeBits_irrFlipProbability() throws Exception { int numBits = 8; double probabilityP = 0.25; double probabilityQ = 0.85; long inputValue = 0b11111101L; SecureRandom random = SecureRandom.getInstance("SHA1PRNG"); random.setSeed(0x12345678L); int counts[] = new int[64]; for (int iSample = 0; iSample < 10000; iSample++) { Encoder encoder = new Encoder( random, null, // md5 null, // sha256 makeTestingUserSecret("User" + iSample), // userSecret "Foo", // encoderId numBits, // numBits, 0, // probabilityF probabilityP, // probabilityP probabilityQ, // probabilityQ 1, // numCohorts 2); // numBloomHashes long encoded = toLong(encoder.encodeBits(toBytes(inputValue))); for (int iBit = 0; iBit < numBits; iBit++) { if ((encoded & (1L << iBit)) != 0) { counts[iBit]++; } } } assertEquals(8481, counts[0]); // input = 1, 99.99% CI = [8358, 8636] assertEquals(2477, counts[1]); // input = 0, 99.99% CI = [2332, 2669] assertEquals(8486, counts[2]); // input = 1, 99.99% CI = [8358, 8636] 
assertEquals(8495, counts[3]); // input = 1, 99.99% CI = [8358, 8636] assertEquals(8563, counts[4]); // input = 1, 99.99% CI = [8358, 8636] assertEquals(8560, counts[5]); // input = 1, 99.99% CI = [8358, 8636] assertEquals(8481, counts[6]); // input = 1, 99.99% CI = [8358, 8636] assertEquals(8491, counts[7]); // input = 1, 99.99% CI = [8358, 8636] // Check that no high-order bit past numBits ever got set. for (int iBit = numBits; iBit < 64; iBit++) { assertEquals(0, counts[iBit]); } } @Test public void testEncoderEncodeBits_endToEnd() throws Exception { int numBits = 8; long inputValue = 0b11111101L; long prrValue = 0b01110101L; long prrAndIrrValue = 0b01110110L; // Verify that PRR is working as expected. assertEquals( prrValue, toLong( new Encoder( null, null, // md5 null, // sha256 makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId numBits, // numBits, 0.25, // probabilityF 0, // probabilityP 1, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue)))); // Verify that IRR is working as expected. SecureRandom random1 = SecureRandom.getInstance("SHA1PRNG"); random1.setSeed(0x12345678L); assertEquals( prrAndIrrValue, toLong( new Encoder( random1, null, // md5 null, // sha256 makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId numBits, // numBits, 0, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(prrValue)))); // Test that end-to-end is the result of PRR + IRR. 
SecureRandom random2 = SecureRandom.getInstance("SHA1PRNG"); random2.setSeed(0x12345678L); assertEquals( prrAndIrrValue, toLong( new Encoder( random2, null, // md5 null, // sha256 makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue)))); } @Test public void testEncoderEncodeBits_32BitValuesEncodeSuccessfully() throws Exception { // Regression test for b/22035650. int numBits = 32; byte[] userSecret = makeTestingUserSecret("Bar"); // Explicitly spot-check the output for 2^0 and 2^31. long inputValue0 = 1L; long outputValue0 = 590349342L; SecureRandom random0 = SecureRandom.getInstance("SHA1PRNG"); random0.setSeed(0x12345678L); assertEquals( outputValue0, toLong( new Encoder( random0, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue0)))); long inputValue31 = 1L << 31; long outputValue31 = 2746482838L; SecureRandom random31 = SecureRandom.getInstance("SHA1PRNG"); random31.setSeed(0x12345678L); assertEquals( outputValue31, toLong( new Encoder( random31, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue31)))); // Check the range 2^1 to 2^30, making sure no values produce exceptions. 
SecureRandom randomRange = SecureRandom.getInstance("SHA1PRNG"); randomRange.setSeed(0x12345678L); for (int i = 1; i <= 30; i++) { long inputValue = 1L << (i - 1); new Encoder( randomRange, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue)); } } @Test public void testEncoderEncodeBits_63BitValuesEncodeSuccessfully() throws Exception { int numBits = 63; byte[] userSecret = makeTestingUserSecret("Bar"); // Explicitly spot-check the output for 2^0 and 2^63. long inputValue0 = 1L; long outputValue0 = 867402030798341150L; SecureRandom random0 = SecureRandom.getInstance("SHA1PRNG"); random0.setSeed(0x12345678L); assertEquals( outputValue0, toLong( new Encoder( random0, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue0)))); long inputValue63 = 1L << 62; long outputValue63 = 5497102447743615126L; SecureRandom random63 = SecureRandom.getInstance("SHA1PRNG"); random63.setSeed(0x12345678L); assertEquals( outputValue63, toLong( new Encoder( random63, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue63)))); // Check the range 2^1 to 2^62, making sure no values produce exceptions. 
SecureRandom randomRange = SecureRandom.getInstance("SHA1PRNG"); randomRange.setSeed(0x12345678L); for (int i = 1; i <= 62; i++) { long inputValue = 1L << (i - 1); new Encoder( randomRange, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeBits(toBytes(inputValue)); } } @Test public void testEncoderEncodeBits_4096BitValuesEncodeSuccessfully() throws Exception { int numBits = 4096; byte[] userSecret = makeTestingUserSecret("Bar"); // Check the range 2^1 to 2^4095, making sure no values produce exceptions. SecureRandom randomRange = SecureRandom.getInstance("SHA1PRNG"); randomRange.setSeed(0x12345678L); // Stride is arbitrary, but chosen to be large enough to not cause too many probes (~40) and // prime to explore well. int stride = 97; for (int i = 1; i < numBits; i += stride) { new Encoder( randomRange, null, // md5 null, // sha256 userSecret, // userSecret "MyEncoder", // encoderId numBits, // numBits, 0.25, // probabilityF 0.3, // probabilityP 0.7, // probabilityQ 1, // numCohorts 2) // numBloomHashes .encodeOrdinal(i); } } @Test public void testGetEncoderId() throws Exception { Encoder encoder = new Encoder(makeTestingUserSecret("Bar"), // userSecret "Foo", // encoderId 8, // numBits, 13.0 / 128.0, // probabilityF 0.25, // probabilityP 0.75, // probabilityQ 1, // numCohorts 2); // numBloomHashes assertEquals("Foo", encoder.getEncoderId()); } }
apache/flink
34,907
flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/common/JoinTestPrograms.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.nodes.exec.common; import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecJoin; import org.apache.flink.table.test.program.SinkTestStep; import org.apache.flink.table.test.program.SourceTestStep; import org.apache.flink.table.test.program.TableTestProgram; import org.apache.flink.types.Row; import org.apache.flink.types.RowKind; import java.util.stream.IntStream; /** {@link TableTestProgram} definitions for testing {@link StreamExecJoin}. 
*/ public class JoinTestPrograms { public static final TableTestProgram NON_WINDOW_INNER_JOIN; public static final TableTestProgram NON_WINDOW_INNER_JOIN_WITH_NULL; public static final TableTestProgram CROSS_JOIN; public static final TableTestProgram JOIN_WITH_FILTER; public static final TableTestProgram INNER_JOIN_WITH_DUPLICATE_KEY; public static final TableTestProgram INNER_JOIN_WITH_NON_EQUI_JOIN; public static final TableTestProgram INNER_JOIN_WITH_EQUAL_PK; public static final TableTestProgram INNER_JOIN_WITH_PK; public static final TableTestProgram LEFT_JOIN; public static final TableTestProgram FULL_OUTER; public static final TableTestProgram RIGHT_JOIN; public static final TableTestProgram SEMI_JOIN; public static final TableTestProgram ANTI_JOIN; public static final TableTestProgram JOIN_WITH_STATE_TTL_HINT; public static final TableTestProgram SEMI_ANTI_JOIN_WITH_LITERAL_AGG; public static final TableTestProgram OUTER_JOIN_CHANGELOG_TEST = TableTestProgram.of("join-duplicate-emission-bug", "bug with CTE and left join") .setupTableSource( SourceTestStep.newBuilder("upsert_table_with_duplicates") .addSchema( "`execution_plan_id` VARCHAR(2147483647) NOT NULL", "`workflow_id` VARCHAR(2147483647) NOT NULL", "`event_section_id` VARCHAR(2147483647) NOT NULL", "CONSTRAINT `PRIMARY` PRIMARY KEY (`execution_plan_id`, `event_section_id`) NOT ENFORCED") .addOption("changelog-mode", "I, UA,D") .producedValues( IntStream.range(0, 13) .mapToObj( i -> Row.ofKind( RowKind.UPDATE_AFTER, "section_id_1", "section_id_2", "section_id_3")) .toArray(Row[]::new)) .build()) .setupTableSink( SinkTestStep.newBuilder("sink") .addSchema("event_element_id STRING", "cnt BIGINT") .testMaterializedData() .consumedValues(Row.of("pk-1", 1), Row.of("pk-2", 1)) .build()) .runSql( "INSERT INTO sink WITH\n" + " section_detail as (\n" + " SELECT s.event_section_id\n" + " \n" + " FROM upsert_table_with_duplicates s\n" + " ),\n" + "\n" + " event_element as (\n" + " SELECT\n" + " ed.id as 
event_element_id\n" + " FROM (\n" + " SELECT\n" + " 'pk-2' id,\n" + " 'section_id_3' section_id\n" + " UNION ALL\n" + " SELECT\n" + " 'pk-1' id,\n" + " 'section_id_3' section_id\n" + " ) ed \n" + " LEFT JOIN\n" + " section_detail as s\n" + " ON s.event_section_id = ed.section_id\n" + " )\n" + "\n" + "SELECT event_element_id, COUNT(*) cnt\n" + "FROM event_element\n" + "GROUP BY event_element_id") .build(); static final SourceTestStep EMPLOYEE = SourceTestStep.newBuilder("EMPLOYEE") .addSchema("deptno int", "salary bigint", "name varchar") .addOption("filterable-fields", "salary") .producedBeforeRestore( Row.of(null, 101L, "Adam"), Row.of(1, 1L, "Baker"), Row.of(2, 2L, "Charlie"), Row.of(3, 2L, "Don"), Row.of(7, 6L, "Victor")) .producedAfterRestore( Row.of(4, 3L, "Juliet"), Row.of(4, 4L, "Helena"), Row.of(1, 1L, "Ivana")) .build(); static final SourceTestStep DEPARTMENT = SourceTestStep.newBuilder("DEPARTMENT") .addSchema( "department_num int", "b2 bigint", "b3 int", "department_name varchar") .producedBeforeRestore( Row.of(null, 102L, 0, "Accounting"), Row.of(1, 1L, 0, "Research"), Row.of(2, 2L, 1, "Human Resources"), Row.of(2, 3L, 2, "HR"), Row.of(3, 1L, 2, "Sales")) .producedAfterRestore( Row.of(2, 4L, 3, "People Operations"), Row.of(4, 2L, 4, "Engineering")) .build(); static final SourceTestStep DEPARTMENT_NONULLS = SourceTestStep.newBuilder("DEPARTMENT") .addSchema( "department_num int", "b2 bigint", "b3 int", "department_name varchar") .producedBeforeRestore( Row.of(1, 1L, 0, "Research"), Row.of(2, 2L, 1, "Human Resources"), Row.of(2, 3L, 2, "HR"), Row.of(3, 1L, 2, "Sales")) .producedAfterRestore(Row.of(2, 4L, 3, "People Operations")) .build(); static final SourceTestStep SOURCE_T1 = SourceTestStep.newBuilder("T1") .addSchema("a int", "b bigint", "c varchar") .producedBeforeRestore( Row.of(1, 1L, "Baker1"), Row.of(1, 2L, "Baker2"), Row.of(1, 2L, "Baker2"), Row.of(1, 5L, "Baker3"), Row.of(2, 7L, "Baker5"), Row.of(1, 9L, "Baker6"), Row.of(1, 8L, "Baker8"), 
Row.of(3, 8L, "Baker9")) .producedAfterRestore(Row.of(1, 1L, "PostRestore")) .build(); static final SourceTestStep SOURCE_T2 = SourceTestStep.newBuilder("T2") .addSchema("a int", "b bigint", "c varchar") .producedBeforeRestore( Row.of(1, 1L, "BakerBaker"), Row.of(2, 2L, "HeHe"), Row.of(3, 2L, "HeHe")) .producedAfterRestore(Row.of(2, 1L, "PostRestoreRight")) .build(); static { NON_WINDOW_INNER_JOIN = TableTestProgram.of("join-non-window-inner-join", "test non-window inner join") .setupTableSource(SOURCE_T1) .setupTableSource(SOURCE_T2) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("a int", "c1 varchar", "c2 varchar") .consumedBeforeRestore( Row.of(1, "BakerBaker", "Baker2"), Row.of(1, "BakerBaker", "Baker2"), Row.of(1, "BakerBaker", "Baker3"), Row.of(2, "HeHe", "Baker5"), Row.of(1, "BakerBaker", "Baker6"), Row.of(1, "BakerBaker", "Baker8")) .consumedAfterRestore( Row.of(2, "PostRestoreRight", "Baker5")) .build()) .runSql( "insert into MySink " + "SELECT t2.a, t2.c, t1.c\n" + "FROM (\n" + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n" + ") as t1\n" + "JOIN (\n" + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n" + ") as t2\n" + "ON t1.a = t2.a AND t1.b > t2.b") .build(); NON_WINDOW_INNER_JOIN_WITH_NULL = TableTestProgram.of( "join-non-window-inner-join-with-null-cond", "test non-window inner join") .setupTableSource(SOURCE_T1) .setupTableSource(SOURCE_T2) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("a int", "c1 varchar", "c2 varchar") .consumedBeforeRestore( Row.of(1, "BakerBaker", "Baker2"), Row.of(1, "BakerBaker", "Baker2"), Row.of(1, "BakerBaker", "Baker3"), Row.of(2, "HeHe", "Baker5"), Row.of(1, "BakerBaker", "Baker6"), Row.of(1, "BakerBaker", "Baker8"), Row.of(null, "HeHe", "Baker9")) .consumedAfterRestore( Row.of(2, "PostRestoreRight", "Baker5")) .build()) .runSql( "insert into MySink " + "SELECT t2.a, t2.c, t1.c\n" + "FROM (\n" + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1\n" + ") 
as t1\n" + "JOIN (\n" + " SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2\n" + ") as t2\n" + "ON \n" + " ((t1.a is null AND t2.a is null) OR\n" + " (t1.a = t2.a))\n" + " AND t1.b > t2.b") .build(); CROSS_JOIN = TableTestProgram.of("join-cross-join", "test cross join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore( Row.of("Adam", "Accounting"), Row.of("Baker", "Accounting"), Row.of("Adam", "Research"), Row.of("Baker", "Research"), Row.of("Charlie", "Accounting"), Row.of("Charlie", "Research"), Row.of("Charlie", "Human Resources"), Row.of("Adam", "Human Resources"), Row.of("Baker", "Human Resources"), Row.of("Don", "Accounting"), Row.of("Don", "Human Resources"), Row.of("Don", "Research"), Row.of("Victor", "Accounting"), Row.of("Victor", "Human Resources"), Row.of("Victor", "Research"), Row.of("Don", "HR"), Row.of("Charlie", "HR"), Row.of("Adam", "HR"), Row.of("Baker", "HR"), Row.of("Victor", "HR"), Row.of("Don", "Sales"), Row.of("Charlie", "Sales"), Row.of("Adam", "Sales"), Row.of("Baker", "Sales"), Row.of("Victor", "Sales")) .consumedAfterRestore( Row.of("Juliet", "Human Resources"), Row.of("Juliet", "Sales"), Row.of("Juliet", "Research"), Row.of("Juliet", "Accounting"), Row.of("Juliet", "HR"), Row.of("Juliet", "People Operations"), Row.of("Victor", "People Operations"), Row.of("Charlie", "People Operations"), Row.of("Baker", "People Operations"), Row.of("Adam", "People Operations"), Row.of("Don", "People Operations"), Row.of("Helena", "Accounting"), Row.of("Helena", "Human Resources"), Row.of("Helena", "HR"), Row.of("Helena", "People Operations"), Row.of("Helena", "Sales"), Row.of("Helena", "Research"), Row.of("Don", "Engineering"), Row.of("Adam", "Engineering"), Row.of("Victor", "Engineering"), Row.of("Baker", "Engineering"), Row.of("Charlie", "Engineering"), Row.of("Juliet", "Engineering"), Row.of("Helena", 
"Engineering"), Row.of("Ivana", "Accounting"), Row.of("Ivana", "Human Resources"), Row.of("Ivana", "HR"), Row.of("Ivana", "Engineering"), Row.of("Ivana", "People Operations"), Row.of("Ivana", "Sales"), Row.of("Ivana", "Research")) .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE, DEPARTMENT") .build(); JOIN_WITH_FILTER = TableTestProgram.of("join-with-filter", "test join with filter") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore( Row.of("Baker", "Research"), Row.of("Baker", "Sales")) .consumedAfterRestore( Row.of("Ivana", "Sales"), Row.of("Ivana", "Research")) .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE, DEPARTMENT where salary = b2 and salary < CAST(2 AS BIGINT)") .build(); INNER_JOIN_WITH_DUPLICATE_KEY = TableTestProgram.of( "join-inner-join-with-duplicate-key", "inner join with duplicate key") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("deptno int", "department_num int") .consumedBeforeRestore(Row.of(2, 2)) .consumedAfterRestore(Row.of(4, 4), Row.of(4, 4)) .build()) .runSql( "insert into MySink " + "SELECT deptno, department_num FROM EMPLOYEE JOIN DEPARTMENT ON deptno = department_num AND deptno = b3") .build(); INNER_JOIN_WITH_NON_EQUI_JOIN = TableTestProgram.of( "join-inner-join-with-non-equi-join", "inner join with non-equi join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore(Row.of("Don", "Sales")) .consumedAfterRestore( Row.of("Helena", "Engineering"), Row.of("Juliet", "Engineering")) .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE JOIN DEPARTMENT ON deptno = department_num AND salary > 
b2") .build(); String query1 = "SELECT MIN(salary) AS salary, deptno FROM EMPLOYEE GROUP BY deptno"; String query2 = "SELECT MIN(b2) AS b2, department_num FROM DEPARTMENT GROUP BY department_num"; INNER_JOIN_WITH_EQUAL_PK = TableTestProgram.of("join-inner-join-with-equal-pk", "inner join with equal pk") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("deptno int", "department_num int") .consumedBeforeRestore( Row.of(1, 1), Row.of(2, 2), Row.of(3, 3)) .consumedAfterRestore(Row.of(4, 4)) .build()) .runSql( String.format( "INSERT INTO MySink SELECT deptno, department_num FROM (%s) JOIN (%s) ON deptno = department_num", query1, query2)) .build(); INNER_JOIN_WITH_PK = TableTestProgram.of("join-inner-join-with-pk", "inner join with pk") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("deptno int", "department_num int") .consumedBeforeRestore( Row.of(1, 1), Row.of(2, 2), Row.of(3, 2), Row.of(1, 3)) .consumedAfterRestore(Row.of(3, 4), Row.of(2, 4)) .testMaterializedData() .build()) .runSql( String.format( "INSERT INTO MySink SELECT deptno, department_num FROM (%s) JOIN (%s) ON salary = b2", query1, query2)) .build(); FULL_OUTER = TableTestProgram.of("join-outer-join", "outer join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore( Row.of("Adam", null), Row.of(null, "Accounting"), Row.of("Baker", "Research"), Row.of("Charlie", "Human Resources"), Row.of("Charlie", "HR"), Row.of("Don", "Sales"), Row.of("Victor", null)) .consumedAfterRestore( Row.of("Helena", "Engineering"), Row.of("Juliet", "Engineering"), Row.of("Ivana", "Research"), Row.of("Charlie", "People Operations")) .testMaterializedData() .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE FULL OUTER JOIN 
DEPARTMENT ON deptno = department_num") .build(); LEFT_JOIN = TableTestProgram.of("join-left-join", "left join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore( Row.of("Adam", null), Row.of("Baker", "Research"), Row.of("Charlie", "Human Resources"), Row.of("Charlie", "HR"), Row.of("Don", "Sales"), Row.of("Victor", null)) .consumedAfterRestore( Row.of("Helena", "Engineering"), Row.of("Juliet", "Engineering"), Row.of("Ivana", "Research"), Row.of("Charlie", "People Operations")) .testMaterializedData() .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE LEFT JOIN DEPARTMENT ON deptno = department_num") .build(); RIGHT_JOIN = TableTestProgram.of("join-right-join", "right join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar", "department_name varchar") .consumedBeforeRestore( Row.of(null, "Accounting"), Row.of("Baker", "Research"), Row.of("Charlie", "Human Resources"), Row.of("Charlie", "HR"), Row.of("Don", "Sales")) .consumedAfterRestore( Row.of("Helena", "Engineering"), Row.of("Juliet", "Engineering"), Row.of("Ivana", "Research"), Row.of("Charlie", "People Operations")) .testMaterializedData() .build()) .runSql( "insert into MySink " + "SELECT name, department_name FROM EMPLOYEE RIGHT OUTER JOIN DEPARTMENT ON deptno = department_num") .build(); SEMI_JOIN = TableTestProgram.of("join-semi-join", "semi join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar") .consumedBeforeRestore( Row.of("Baker"), Row.of("Charlie"), Row.of("Don")) .consumedAfterRestore( Row.of("Helena"), Row.of("Juliet"), Row.of("Ivana")) .build()) .runSql( "insert into MySink " + "SELECT name FROM EMPLOYEE WHERE deptno IN (SELECT department_num FROM 
DEPARTMENT)") .build(); ANTI_JOIN = TableTestProgram.of("join-anti-join", "anti join") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT_NONULLS) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("name varchar") .consumedBeforeRestore(Row.of("Victor")) .consumedAfterRestore(Row.of("Juliet"), Row.of("Helena")) .testMaterializedData() .build()) .runSql( "insert into MySink " + "SELECT name FROM EMPLOYEE WHERE deptno NOT IN (SELECT department_num FROM DEPARTMENT)") .build(); JOIN_WITH_STATE_TTL_HINT = TableTestProgram.of("join-with-state-ttl-hint", "join with state ttl hint") .setupTableSource(EMPLOYEE) .setupTableSource(DEPARTMENT) .setupTableSink( SinkTestStep.newBuilder("MySink") .addSchema("deptno int", "department_num int") .consumedBeforeRestore( Row.of(1, 1), Row.of(2, 2), Row.of(3, 3)) .consumedAfterRestore(Row.of(4, 4)) .build()) .runSql( String.format( "INSERT INTO MySink SELECT /*+ STATE_TTL('v1' = '1d', 'v2' = '4d'), STATE_TTL('v2' = '8d') */deptno, department_num FROM (%s) v1 JOIN (%s) v2 ON deptno = department_num", query1, query2)) .build(); SEMI_ANTI_JOIN_WITH_LITERAL_AGG = TableTestProgram.of("semi-anti-join-with-literal-agg", "join with literal agg") .setupTableSource( SourceTestStep.newBuilder("source_t1") .addSchema("a INTEGER", "b BIGINT", "c STRING") .producedBeforeRestore( Row.of(1, 2L, "3"), Row.of(12, 34L, "56")) .build()) .setupTableSource( SourceTestStep.newBuilder("source_t2") .addSchema("d INTEGER", "e BIGINT", "f STRING") .producedBeforeRestore( Row.of(1, 2L, "3"), Row.of(11, 22L, "33")) .build()) .setupTableSource( SourceTestStep.newBuilder("source_t3") .addSchema("i INTEGER", "j BIGINT", "k STRING") .producedBeforeRestore( Row.of(1, 2L, "3"), Row.of(111, 222L, "333")) .build()) .setupTableSink( SinkTestStep.newBuilder("sink_t") .addSchema("b BIGINT") .consumedBeforeRestore( "+I[2]", "+I[34]", "-D[2]", "-D[34]", "+I[2]", "+I[34]") .build()) .runSql( "INSERT INTO sink_t SELECT b FROM source_t1 WHERE" + " (CASE WHEN a 
NOT IN (SELECT i FROM source_t3) THEN 1" + " WHEN a NOT IN (SELECT CAST(j AS INTEGER) FROM source_t3) THEN 2 ELSE 3 END)" + " NOT IN (SELECT d FROM source_t2 WHERE source_t1.c = source_t2.f)") .build(); } }
apache/openmeetings
33,000
openmeetings-web/src/main/java/org/apache/openmeetings/web/room/RoomPanel.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License") + you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.openmeetings.web.room; import static de.agilecoders.wicket.core.markup.html.bootstrap.dialog.Modal.BUTTON_MARKUP_ID; import static java.time.Duration.ZERO; import static org.apache.openmeetings.core.util.ChatWebSocketHelper.ID_USER_PREFIX; import static org.apache.openmeetings.db.entity.calendar.Appointment.allowedStart; import static org.apache.openmeetings.web.app.WebSession.getDateFormat; import static org.apache.openmeetings.web.app.WebSession.getUserId; import static org.apache.openmeetings.web.room.VideoSettings.VIDEO_SETTINGS_JS; import static org.apache.openmeetings.util.OmFileHelper.EXTENSION_PDF; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.Calendar; import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.stream.Stream; import org.apache.commons.text.StringEscapeUtils; import org.apache.openmeetings.core.util.WebSocketHelper; import org.apache.openmeetings.db.dao.calendar.AppointmentDao; import org.apache.openmeetings.db.dao.file.FileItemDao; import org.apache.openmeetings.db.dao.user.UserDao; import org.apache.openmeetings.db.entity.basic.Client; import 
org.apache.openmeetings.db.entity.calendar.Appointment; import org.apache.openmeetings.db.entity.file.BaseFileItem; import org.apache.openmeetings.db.entity.room.Room; import org.apache.openmeetings.db.entity.room.Room.Right; import org.apache.openmeetings.db.entity.room.Room.RoomElement; import org.apache.openmeetings.db.entity.room.RoomGroup; import org.apache.openmeetings.db.entity.server.SOAPLogin; import org.apache.openmeetings.db.util.AuthLevelUtil; import org.apache.openmeetings.db.util.ws.RoomMessage; import org.apache.openmeetings.db.util.ws.RoomMessage.Type; import org.apache.openmeetings.db.util.ws.TextRoomMessage; import org.apache.openmeetings.web.app.Application; import org.apache.openmeetings.web.app.ClientManager; import org.apache.openmeetings.web.app.QuickPollManager; import org.apache.openmeetings.web.app.TimerService; import org.apache.openmeetings.web.app.WebSession; import org.apache.openmeetings.web.common.BasePanel; import org.apache.openmeetings.web.room.activities.Activity; import org.apache.openmeetings.web.room.menu.RoomMenuPanel; import org.apache.openmeetings.web.room.sidebar.RoomSidebar; import org.apache.openmeetings.web.room.wb.AbstractWbPanel; import org.apache.openmeetings.web.room.wb.InterviewWbPanel; import org.apache.openmeetings.web.room.wb.WbAction; import org.apache.openmeetings.web.room.wb.WbPanel; import org.apache.openmeetings.web.util.ExtendedClientProperties; import org.apache.openmeetings.util.NullStringer; import org.apache.openmeetings.web.util.TouchPunchResourceReference; import org.apache.wicket.AttributeModifier; import org.apache.wicket.Component; import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeInstantiation; import org.apache.wicket.core.request.handler.IPartialPageRequestHandler; import org.apache.wicket.event.IEvent; import 
org.apache.wicket.extensions.ajax.AjaxDownloadBehavior; import org.apache.wicket.markup.head.HeaderItem; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.head.OnDomReadyHeaderItem; import org.apache.wicket.markup.head.PriorityHeaderItem; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.repeater.RepeatingView; import org.apache.wicket.model.IModel; import org.apache.wicket.model.Model; import org.apache.wicket.model.ResourceModel; import org.apache.wicket.protocol.ws.api.BaseWebSocketBehavior; import org.apache.wicket.protocol.ws.api.event.WebSocketPushPayload; import org.apache.wicket.request.cycle.RequestCycle; import org.apache.wicket.request.mapper.parameter.PageParameters; import org.apache.wicket.request.resource.ResourceStreamResource; import org.apache.wicket.util.resource.FileResourceStream; import org.apache.wicket.util.resource.IResourceStream; import org.apache.wicket.util.string.Strings; import org.apache.openmeetings.mediaserver.KurentoHandler; import org.apache.openmeetings.mediaserver.StreamProcessor; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.github.openjson.JSONArray; import com.github.openjson.JSONObject; import org.wicketstuff.jquery.core.JQueryBehavior; import org.wicketstuff.jquery.core.Options; import org.wicketstuff.jquery.ui.interaction.droppable.Droppable; import org.wicketstuff.jquery.ui.settings.JQueryUILibrarySettings; import de.agilecoders.wicket.core.markup.html.bootstrap.button.BootstrapAjaxLink; import de.agilecoders.wicket.core.markup.html.bootstrap.button.Buttons; import de.agilecoders.wicket.core.markup.html.bootstrap.dialog.Alert; import de.agilecoders.wicket.core.markup.html.bootstrap.dialog.Modal; import de.agilecoders.wicket.core.markup.html.bootstrap.dialog.Modal.Backdrop; import jakarta.inject.Inject; import 
de.agilecoders.wicket.core.markup.html.bootstrap.dialog.TextContentModal; @AuthorizeInstantiation("ROOM") public class RoomPanel extends BasePanel { private static final long serialVersionUID = 1L; private static final Logger log = LoggerFactory.getLogger(RoomPanel.class); public static final String PARAM_ACTION = "action"; private static final String ACCESS_DENIED_ID = "access-denied"; private static final String EVENT_DETAILS_ID = "event-details"; public enum Action { KICK("kick") , MUTE_OTHERS("muteOthers") , MUTE("mute") , TOGGLE_RIGHT("toggleRight"); private final String jsName; private Action(String jsName) { this.jsName = jsName; } public static Action of(String jsName) { return Stream.of(Action.values()) .filter(a -> a.jsName.equals(jsName)) .findAny() .orElse(null); } } private final Room r; private final WebMarkupContainer room = new WebMarkupContainer("roomContainer"); private final AbstractDefaultAjaxBehavior roomEnter = new AbstractDefaultAjaxBehavior() { private static final long serialVersionUID = 1L; @Override protected void respond(AjaxRequestTarget target) { log.debug("RoomPanel::roomEnter"); WebSession ws = WebSession.get(); Client c = getClient(); JSONObject options = VideoSettings.getInitJson(c.getSid()) .put("uid", c.getUid()) .put("userId", c.getUserId()) .put("rights", c.toJson(true).getJSONArray("rights")) .put("interview", r.isInterview()) .put("audioOnly", r.isAudioOnly()) .put("allowRecording", r.isAllowRecording()) .put("questions", r.isAllowUserQuestions()) .put("showMicStatus", !r.getHiddenElements().contains(RoomElement.MICROPHONE_STATUS)); if (!Strings.isEmpty(r.getRedirectURL()) && (ws.getSoapLogin() != null || ws.getInvitation() != null)) { options.put("reloadUrl", r.getRedirectURL()); } StringBuilder sb = new StringBuilder("Room.init(").append(options.toString(new NullStringer())).append(");") .append(wb.getInitScript()) .append(getQuickPollJs()); sb.append(sendClientsOnInit()); target.appendJavaScript(sb); 
WebSocketHelper.sendRoom(new TextRoomMessage(r.getId(), c, RoomMessage.Type.ROOM_ENTER, c.getUid())); // play video from other participants initVideos(target); getMainPanel().getChat().roomEnter(r, target); if (r.isFilesOpened()) { sidebar.setFilesActive(target); } if (Room.Type.PRESENTATION != r.getType()) { boolean modsEmpty = noModerators(); log.debug("RoomPanel::roomEnter, mods IS EMPTY ? {}, is MOD ? {}", modsEmpty, c.hasRight(Room.Right.MODERATOR)); if (modsEmpty) { showIdeaAlert(target, getString(r.isModerated() ? "641" : "498")); } } if (r.isWaitRecording()) { showIdeaAlert(target, getString("1315")); } wb.update(target); jsInited = true; } private CharSequence sendClientsOnInit() { Client c = getClient(); StringBuilder res = new StringBuilder(); if (c.hasRight(Room.Right.MODERATOR) || !r.isHidden(RoomElement.USER_COUNT)) { res.append(createAddClientJs(c)); } return res; } private void initVideos(AjaxRequestTarget target) { StringBuilder sb = new StringBuilder(); JSONArray streams = new JSONArray(); cm.streamByRoom(getRoom().getId()) .map(Client::getStreams) .flatMap(List::stream) .forEach(sd -> streams.put(sd.toJson())); if (streams.length() > 0) { sb.append("VideoManager.play(").append(streams).append(", ").append(kHandler.getTurnServers(getClient())).append(");"); } if (r.isInterview() && streamProcessor.recordingAllowed(getClient())) { sb.append("WbArea.setRecEnabled(true);"); } if (!Strings.isEmpty(sb)) { target.appendJavaScript(sb); } } }; private RedirectMessageDialog roomClosed; private Modal<String> clientKicked; private Alert waitModerator; private RoomMenuPanel menu; private RoomSidebar sidebar; private final AbstractWbPanel wb; private String fuid; private String ftype; private final AjaxDownloadBehavior download = new AjaxDownloadBehavior(new ResourceStreamResource() { private static final long serialVersionUID = 1L; { setCacheDuration(ZERO); } @Override protected IResourceStream getResourceStream(Attributes attributes) { 
setFileName(EXTENSION_PDF.equals(ftype) ? "whiteboard.pdf" : "slide.png"); return new FileResourceStream(Paths.get(System.getProperty("java.io.tmpdir"), fuid).toFile()); } }) { private static final long serialVersionUID = 1L; @Override protected void onDownloadCompleted(AjaxRequestTarget target) { super.onDownloadCompleted(target); try { Files.deleteIfExists(Paths.get(System.getProperty("java.io.tmpdir"), fuid)); } catch (Exception e) { log.error("unexcepted error while clean-up", e); } fuid = null; ftype = null; } }; Component eventDetail = new WebMarkupContainer(EVENT_DETAILS_ID).setVisible(false); private boolean avInited = false; private boolean jsInited = false; @Inject private ClientManager cm; @Inject private UserDao userDao; @Inject private AppointmentDao apptDao; @Inject private QuickPollManager qpollManager; @Inject private KurentoHandler kHandler; @Inject private StreamProcessor streamProcessor; @Inject private TimerService timerService; @Inject private FileItemDao fileDao; public RoomPanel(String id, Room r) { super(id); this.r = r; this.wb = r.isInterview() ? 
new InterviewWbPanel("whiteboard", this) : new WbPanel("whiteboard", this); } public void startDownload(IPartialPageRequestHandler handler, String type, String fuid) { this.fuid = fuid; ftype = type; download.initiate(handler); } @Override protected void onInitialize() { super.onInitialize(); //let's refresh user in client Client c = getClient().updateUser(userDao); Component accessDenied = new WebMarkupContainer(ACCESS_DENIED_ID).setVisible(false); room.setOutputMarkupPlaceholderTag(true); room.add(menu = new RoomMenuPanel("menu", this)); room.add(AttributeModifier.append("data-room-id", r.getId())); if (r.isInterview()) { room.add(new WebMarkupContainer("wb-area").add(wb)); } else { Droppable<BaseFileItem> wbArea = new Droppable<>("wb-area") { private static final long serialVersionUID = 1L; @Override public void onConfigure(JQueryBehavior behavior) { super.onConfigure(behavior); behavior.setOption("hoverClass", Options.asString("droppable-hover")); behavior.setOption("accept", Options.asString(".recorditem, .fileitem, .readonlyitem")); } @Override public void onDrop(AjaxRequestTarget target, Component component) { Object o = component.getDefaultModelObject(); if (wb.isVisible() && o instanceof BaseFileItem f) { if (sidebar.getFilesPanel().isSelected(f)) { for (Entry<String, BaseFileItem> e : sidebar.getFilesPanel().getSelected().entrySet()) { wb.sendFileToWb(e.getValue(), false); } } else { wb.sendFileToWb(f, false); } } } }; room.add(wbArea.add(wb)); } room.add(roomEnter); room.add(sidebar = new RoomSidebar("sidebar", this)); add(roomClosed = new RedirectMessageDialog("room-closed", "1098", r.isClosed(), r.getRedirectURL())); if (r.isClosed()) { room.setVisible(false); } else if (cm.streamByRoom(r.getId()).count() >= r.getCapacity()) { accessDenied = new ExpiredMessageDialog(ACCESS_DENIED_ID, getString("99"), menu); room.setVisible(false); } else if (r.getId().equals(WebSession.get().getRoomId())) { // secureHash/invitationHash, already checked } else { boolean 
allowed = Application.get().isRoomAllowedToUser(r, c.getUser()); String deniedMessage = null; if (r.isAppointment()) { Appointment a = apptDao.getByRoom(r.getId()); if (allowed) { Calendar cal = WebSession.getCalendar(); if (a.isOwner(getUserId()) || cal.getTime().after(allowedStart(a.getStart())) && cal.getTime().before(a.getEnd())) { eventDetail = new EventDetailDialog(EVENT_DETAILS_ID, a); } else { allowed = false; deniedMessage = String.format("%s %s - %s", getString("error.hash.period"), getDateFormat().format(a.getStart()), getDateFormat().format(a.getEnd())); } } } if (!allowed) { if (deniedMessage == null) { deniedMessage = getString("1599"); } accessDenied = new ExpiredMessageDialog(ACCESS_DENIED_ID, deniedMessage, menu); room.setVisible(false); } } RepeatingView groupstyles = new RepeatingView("groupstyle"); add(groupstyles.setVisible(room.isVisible() && !r.getGroups().isEmpty())); if (room.isVisible()) { add(new NicknameDialog("nickname", this)); add(download); add(new BaseWebSocketBehavior("media")); for (RoomGroup rg : r.getGroups()) { WebMarkupContainer groupstyle = new WebMarkupContainer(groupstyles.newChildId()); groupstyle.add(AttributeModifier.append("href" , (String)RequestCycle.get().urlFor(new GroupCustomCssResourceReference(), new PageParameters().add("id", rg.getGroup().getId())) )); groupstyles.add(groupstyle); } //We are setting initial rights here final int count = cm.addToRoom(c.setRoom(getRoom())); SOAPLogin soap = WebSession.get().getSoapLogin(); if (soap != null && soap.isModerator()) { c.allow(Right.SUPER_MODERATOR); } else { Set<Right> rr = AuthLevelUtil.getRoomRight(c.getUser(), r, r.isAppointment() ? 
apptDao.getByRoom(r.getId()) : null, count); if (!rr.isEmpty()) { c.allow(rr); log.info("Setting rights for client:: {} -> {}", rr, c.hasRight(Right.MODERATOR)); } } if (r.isModerated() && r.isWaitModerator() && !c.hasRight(Right.MODERATOR) && noModerators()) { room.setVisible(false); createWaitModerator(true); getMainPanel().getChat().toggle(null, false); } timerService.scheduleModCheck(r); } else { add(new WebMarkupContainer("nickname").setVisible(false)); } cm.update(c); if (waitModerator == null) { createWaitModerator(false); } add(room, accessDenied, eventDetail, waitModerator); add(clientKicked = new TextContentModal("client-kicked", new ResourceModel("606"))); clientKicked .header(new ResourceModel("797")) .setCloseOnEscapeKey(false) .setBackdrop(Backdrop.FALSE) .addButton(new BootstrapAjaxLink<>(BUTTON_MARKUP_ID, Model.of(""), Buttons.Type.Outline_Primary, new ResourceModel("54")) { private static final long serialVersionUID = 1L; public void onClick(AjaxRequestTarget target) { clientKicked.close(target); menu.exit(target); } }); } @Override public void onEvent(IEvent<?> event) { if (!jsInited) { return; } Client curClient = getClient(); if (curClient != null && event.getPayload() instanceof WebSocketPushPayload) { WebSocketPushPayload wsEvent = (WebSocketPushPayload) event.getPayload(); if (wsEvent.getMessage() instanceof RoomMessage m) { IPartialPageRequestHandler handler = wsEvent.getHandler(); switch (m.getType()) { case POLL_CREATED: menu.updatePoll(handler, m.getUserId()); break; case POLL_UPDATED: menu.updatePoll(handler, null); break; case RECORDING_TOGGLED: menu.update(handler); updateInterviewRecordingButtons(handler); break; case SHARING_TOGGLED: menu.update(handler); break; case RIGHT_UPDATED: onRightUpdated(curClient, (TextRoomMessage)m, handler); break; case ROOM_ENTER: onRoomEnter(curClient, (TextRoomMessage)m, handler); break; case ROOM_EXIT: onRoomExit((TextRoomMessage)m, handler); break; case ROOM_CLOSED: 
handler.add(room.setVisible(false)); roomClosed.show(handler); break; case REQUEST_RIGHT_MODERATOR: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_MODERATOR), handler); break; case REQUEST_RIGHT_PRESENTER: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_PRESENTER), handler); break; case REQUEST_RIGHT_WB: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_WB), handler); break; case REQUEST_RIGHT_SHARE: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_SHARE), handler); break; case REQUEST_RIGHT_REMOTE: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_REMOTE), handler); break; case REQUEST_RIGHT_A: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_A), handler); break; case REQUEST_RIGHT_AV: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_AV), handler); break; case REQUEST_RIGHT_MUTE_OTHERS: sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_MUTE_OTHERS), handler); break; case ACTIVITY_REMOVE: sidebar.removeActivity(((TextRoomMessage)m).getText(), handler); break; case HAVE_QUESTION: if (curClient.hasRight(Room.Right.MODERATOR) || getUserId().equals(m.getUserId())) { sidebar.addActivity(new Activity((TextRoomMessage)m, Activity.Type.REQ_RIGHT_HAVE_QUESTION), handler); } break; case KICK: onKick(curClient, (TextRoomMessage)m, handler); break; case MUTE: onMute(curClient, (TextRoomMessage)m, handler); break; case MUTE_OTHERS: onMuteOthers((TextRoomMessage)m, handler); break; case QUICK_POLL_UPDATED: menu.update(handler); handler.appendJavaScript(getQuickPollJs()); break; case KURENTO_STATUS: menu.update(handler); break; case WB_RELOAD: if (Room.Type.INTERVIEW != r.getType()) { wb.reloadWb(handler); } break; case MODERATOR_IN_ROOM: if (!r.isModerated() || !r.isWaitModerator()) { log.warn("Something weird: `moderatorInRoom` in wrong room {}", r); } else 
if (!curClient.hasRight(Room.Right.MODERATOR)) { boolean moderInRoom = Boolean.TRUE.equals(Boolean.valueOf(((TextRoomMessage)m).getText())); log.warn("!! moderatorInRoom: {}", moderInRoom); if (room.isVisible() != moderInRoom) { handler.add(room.setVisible(moderInRoom)); getMainPanel().getChat().toggle(handler, moderInRoom && !r.isHidden(RoomElement.CHAT)); if (room.isVisible()) { handler.appendJavaScript(roomEnter.getCallbackScript()); handler.add(waitModerator.setVisible(false)); } else { handler.add(waitModerator.setVisible(true)); } } } break; case WB_PUT_FILE: onWbPutFile((TextRoomMessage)m); break; case FILE_TREE_UPDATE: onFileTreeUpdate(handler); break; } } } super.onEvent(event); } private void onRightUpdated(Client curClient, TextRoomMessage m, IPartialPageRequestHandler handler) { String uid = m.getText(); Client c = cm.get(uid); if (c == null) { log.error("Not existing user in rightUpdated {} !!!!", uid); return; } boolean self = curClient.getUid().equals(c.getUid()); StringBuilder sb = new StringBuilder("Room.updateClient(") .append(c.toJson(self).toString(new NullStringer())) .append(");") .append(sendClientsOnUpdate()); handler.appendJavaScript(sb); sidebar.update(handler); menu.update(handler); wb.update(handler); updateInterviewRecordingButtons(handler); } private void onRoomEnter(Client curClient, TextRoomMessage m, IPartialPageRequestHandler handler) { sidebar.update(handler); menu.update(handler); String uid = m.getText(); Client c = cm.get(uid); if (c == null) { log.error("Not existing user in rightUpdated {} !!!!", uid); return; } boolean self = curClient.getUid().equals(c.getUid()); if (self || curClient.hasRight(Room.Right.MODERATOR) || !r.isHidden(RoomElement.USER_COUNT)) { handler.appendJavaScript("Room.addClient([" + c.toJson(self).toString(new NullStringer()) + "]);"); } sidebar.addActivity(new Activity(m, Activity.Type.ROOM_ENTER), handler); } private void onRoomExit(TextRoomMessage m, IPartialPageRequestHandler handler) { String uid = 
m.getText(); sidebar.update(handler); sidebar.addActivity(new Activity(m, Activity.Type.ROOM_EXIT), handler); handler.appendJavaScript("Room.removeClient('" + uid + "'); Chat.removeTab('" + ID_USER_PREFIX + m.getUserId() + "');"); } private void onKick(Client curClient, TextRoomMessage m, IPartialPageRequestHandler handler) { String uid = m.getText(); if (curClient.getUid().equals(uid)) { handler.add(room.setVisible(false)); getMainPanel().getChat().toggle(handler, false); clientKicked.show(handler); cm.exitRoom(curClient); } } private void onMute(Client curClient, TextRoomMessage m, IPartialPageRequestHandler handler) { JSONObject obj = new JSONObject(m.getText()); Client c = cm.getBySid(obj.getString("sid")); if (c == null) { log.error("Not existing user in mute {} !!!!", obj); return; } if (!curClient.getUid().equals(c.getUid())) { handler.appendJavaScript(String.format("if (typeof(VideoManager) !== 'undefined') {VideoManager.mute('%s', %s);}", obj.getString("uid"), obj.getBoolean("mute"))); } } private void onMuteOthers(TextRoomMessage m, IPartialPageRequestHandler handler) { String uid = m.getText(); Client c = cm.get(uid); if (c == null) { // no luck return; } handler.appendJavaScript(String.format("if (typeof(VideoManager) !== 'undefined') {VideoManager.muteOthers('%s');}", uid)); } private void onWbPutFile(TextRoomMessage m) { JSONObject obj = new JSONObject(m.getText()); getWb().sendFileToWb(fileDao.getAny(obj.getLong("fileId")), obj.getBoolean("clean")); } private void onFileTreeUpdate(IPartialPageRequestHandler handler) { sidebar.getFilesPanel().update(handler); } private String getQuickPollJs() { return String.format("Room.quickPoll(%s);", qpollManager.toJson(r.getId())); } private void updateInterviewRecordingButtons(IPartialPageRequestHandler handler) { Client curClient = getClient(); if (r.isInterview() && curClient.hasRight(Right.MODERATOR)) { if (streamProcessor.isRecording(r.getId())) { handler.appendJavaScript("if (typeof(WbArea) === 'object') 
{WbArea.setRecStarted(true);}"); } else if (streamProcessor.recordingAllowed(getClient())) { boolean hasStreams = cm.streamByRoom(r.getId()) .anyMatch(cl -> !cl.getStreams().isEmpty()); handler.appendJavaScript(String.format("if (typeof(WbArea) === 'object') {WbArea.setRecStarted(false);WbArea.setRecEnabled(%s);}", hasStreams)); } } } public boolean isModerator(long userId, long roomId) { return isModerator(cm, userId, roomId); } public static boolean isModerator(ClientManager cm, long userId, long roomId) { return hasRight(cm, userId, roomId, Right.MODERATOR); } public static boolean hasRight(ClientManager cm, long userId, long roomId, Right r) { return cm.streamByRoom(roomId) .anyMatch(c -> c.sameUserId(userId) && c.hasRight(r)); } @Override public BasePanel onMenuPanelLoad(IPartialPageRequestHandler handler) { getBasePage().getHeader().setVisible(false); getMainPanel().getTopControls().setVisible(false); Component loader = getBasePage().getLoader().setVisible(false); if (r.isHidden(RoomElement.CHAT) || !isVisible()) { getMainPanel().getChat().toggle(handler, false); } if (handler != null) { handler.add(loader, getBasePage().getHeader(), getMainPanel().getTopControls()); if (isVisible()) { handler.appendJavaScript("Room.load();"); } } return this; } public void show(IPartialPageRequestHandler handler) { getMainPanel().getChat().toggle(handler, !r.isHidden(RoomElement.CHAT)); handler.add(this.setVisible(true)); handler.appendJavaScript("Room.load();"); } @Override public void cleanup(IPartialPageRequestHandler handler) { if (eventDetail instanceof EventDetailDialog evtDialog) { evtDialog.close(handler); } handler.add(getBasePage().getHeader().setVisible(true), getMainPanel().getTopControls().setVisible(true)); if (r.isHidden(RoomElement.CHAT)) { getMainPanel().getChat().toggle(handler, true); } handler.appendJavaScript("if (typeof(Room) !== 'undefined') { Room.unload(); }"); cm.exitRoom(getClient()); getMainPanel().getChat().roomExit(r, handler); } @Override 
public void renderHead(IHeaderResponse response) { super.renderHead(response); response.render(new PriorityHeaderItem(JavaScriptHeaderItem.forUrl("js/room.js")) { private static final long serialVersionUID = 1L; @Override public List<HeaderItem> getDependencies() { return List.of( VIDEO_SETTINGS_JS, new PriorityHeaderItem(JavaScriptHeaderItem.forUrl("js/wb.js")) , new PriorityHeaderItem(JavaScriptHeaderItem.forReference(JQueryUILibrarySettings.get().getJavaScriptReference()))); } }); response.render(JavaScriptHeaderItem.forReference(TouchPunchResourceReference.instance())); if (room.isVisible()) { response.render(OnDomReadyHeaderItem.forScript(roomEnter.getCallbackScript())); } } public void requestRight(Right right, IPartialPageRequestHandler handler) { RoomMessage.Type reqType = null; if (noModerators()) { if (r.isModerated()) { showIdeaAlert(handler, getString("696")); return; } else { // we found no-one we can ask, allow right rightsUpdated(cm.update(getClient().allow(right))); } } // ask switch (right) { case MODERATOR: reqType = Type.REQUEST_RIGHT_MODERATOR; break; case PRESENTER: reqType = Type.REQUEST_RIGHT_PRESENTER; break; case WHITEBOARD: reqType = Type.REQUEST_RIGHT_WB; break; case SHARE: reqType = Type.REQUEST_RIGHT_SHARE; break; case AUDIO: reqType = Type.REQUEST_RIGHT_A; break; case MUTE_OTHERS: reqType = Type.REQUEST_RIGHT_MUTE_OTHERS; break; case REMOTE_CONTROL: reqType = Type.REQUEST_RIGHT_REMOTE; break; case VIDEO: reqType = Type.REQUEST_RIGHT_AV; break; default: break; } if (reqType != null) { WebSocketHelper.sendRoom(new TextRoomMessage(getRoom().getId(), getClient(), reqType, getClient().getUid())); } } public void allowRight(Client client, Right... rights) { rightsUpdated(client.allow(rights)); } public void denyRight(Client client, Right... 
rights) { for (Right right : rights) { client.deny(right); } client.getCamStreams().forEach(sd -> { if (sd.has(Client.Activity.AUDIO) && !client.hasRight(Right.AUDIO)) { sd.remove(Client.Activity.AUDIO); } if (sd.has(Client.Activity.VIDEO) && !client.hasRight(Right.VIDEO)) { sd.remove(Client.Activity.VIDEO); } }); rightsUpdated(client); } public void rightsUpdated(Client c) { cm.update(c); streamProcessor.rightsUpdated(c); } public void broadcast(Client client) { cm.update(client); WebSocketHelper.sendRoom(new TextRoomMessage(getRoom().getId(), getClient(), RoomMessage.Type.RIGHT_UPDATED, client.getUid())); } @Override protected void process(IPartialPageRequestHandler handler, JSONObject o) throws IOException { if (room.isVisible() && "room".equals(o.optString("area"))) { final String type = o.optString("type"); if ("wb".equals(type)) { WbAction a = WbAction.of(o.getString(PARAM_ACTION)); wb.processWbAction(a, o.optJSONObject("data"), handler); } else if ("room".equals(type)) { sidebar.roomAction(handler, o); } else if ("av".equals(type)) { ExtendedClientProperties cp = WebSession.get().getExtendedProperties(); Client c = cp.setSettings(o.optJSONObject("settings")).update(getClient()); if (!avInited) { avInited = true; if (Room.Type.CONFERENCE == r.getType()) { if (!c.isAllowed(Client.Activity.AUDIO)) { c.allow(Room.Right.AUDIO); } if (!c.getRoom().isAudioOnly() && !c.isAllowed(Client.Activity.VIDEO)) { c.allow(Room.Right.VIDEO); } streamProcessor.onToggleActivity(c, c.getRoom().isAudioOnly() ? 
Client.Activity.AUDIO : Client.Activity.AUDIO_VIDEO); } } c.getCamStreams().forEach(sd -> { sd.setWidth(c.getWidth()); sd.setHeight(c.getHeight()); }); broadcast(c); } } } public Room getRoom() { return r; } public Client getClient() { return getMainPanel().getClient(); } public String getUid() { return getMainPanel().getUid(); } public boolean screenShareAllowed() { Client c = getClient(); return c.getScreenStream().isPresent() || streamProcessor.screenShareAllowed(c); } public RoomSidebar getSidebar() { return sidebar; } public AbstractWbPanel getWb() { return wb; } public String getPublishingUser() { return null; } public boolean isInterview() { return r.isInterview(); } private void createWaitModerator(final boolean autoopen) { waitModerator = new Alert("wait-moderator", new ResourceModel("wait-moderator.message"), new ResourceModel("wait-moderator.title")) { private static final long serialVersionUID = 1L; @Override protected Component createMessage(String markupId, IModel<String> message) { return super.createMessage(markupId, message).setEscapeModelStrings(false); } }; waitModerator.type(Alert.Type.Warning).setCloseButtonVisible(false); waitModerator.setOutputMarkupId(true).setOutputMarkupPlaceholderTag(true).setVisible(autoopen); } @Override protected String getCssClass() { String clazz = "room " + r.getType().name(); if (r.isHidden(RoomElement.TOP_BAR)) { clazz += " no-menu"; } if (r.isHidden(RoomElement.ACTIVITIES)) { clazz += " no-activities"; } if (r.isHidden(RoomElement.CHAT)) { clazz += " no-chat"; } if (!r.isHidden(RoomElement.MICROPHONE_STATUS)) { clazz += " mic-status"; } return clazz; } private void showIdeaAlert(IPartialPageRequestHandler handler, String msg) { showAlert(handler, "info", msg, "far fa-lightbulb"); } private void showAlert(IPartialPageRequestHandler handler, String type, String msg, String icon) { handler.appendJavaScript("OmUtil.alert('" + type + "', '<i class=\"" + icon + "\"></i>&nbsp;" + StringEscapeUtils.escapeEcmaScript(msg) 
+ "', 10000)"); } private CharSequence createAddClientJs(Client c) { JSONArray arr = new JSONArray(); cm.streamByRoom(r.getId()).forEach(cl -> arr.put(cl.toJson(c.getUid().equals(cl.getUid())))); return new StringBuilder() .append("Room.addClient(") .append(arr.toString(new NullStringer())) .append(");"); } private CharSequence sendClientsOnUpdate() { Client c = getClient(); StringBuilder res = new StringBuilder(); if (r.isHidden(RoomElement.USER_COUNT)) { if (c.hasRight(Room.Right.MODERATOR)) { res.append(createAddClientJs(c)); } else { res.append("Room.removeOthers();"); } } return res; } private boolean noModerators() { return cm.streamByRoom(r.getId()) .filter(cl -> cl.hasRight(Room.Right.MODERATOR)) .findAny() .isEmpty(); } }
apache/ozone
35,616
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/ContainerEndpoint.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.recon.api; import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_FETCH_COUNT; import static org.apache.hadoop.ozone.recon.ReconConstants.DEFAULT_FILTER_FOR_MISSING_CONTAINERS; import static org.apache.hadoop.ozone.recon.ReconConstants.PREV_CONTAINER_ID_DEFAULT_VALUE; import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_QUERY_FILTER; import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_QUERY_LIMIT; import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_QUERY_MAX_CONTAINER_ID; import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_QUERY_MIN_CONTAINER_ID; import static org.apache.hadoop.ozone.recon.ReconConstants.RECON_QUERY_PREVKEY; import java.io.IOException; import java.time.Instant; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; import javax.inject.Inject; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import 
javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hdds.protocol.proto.HddsProtos; import org.apache.hadoop.hdds.scm.container.ContainerID; import org.apache.hadoop.hdds.scm.container.ContainerInfo; import org.apache.hadoop.hdds.scm.pipeline.Pipeline; import org.apache.hadoop.hdds.scm.pipeline.PipelineID; import org.apache.hadoop.hdds.scm.pipeline.PipelineManager; import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException; import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager; import org.apache.hadoop.ozone.om.helpers.BucketLayout; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyLocationInfoGroup; import org.apache.hadoop.ozone.recon.ReconUtils; import org.apache.hadoop.ozone.recon.api.types.ContainerDiscrepancyInfo; import org.apache.hadoop.ozone.recon.api.types.ContainerKeyPrefix; import org.apache.hadoop.ozone.recon.api.types.ContainerMetadata; import org.apache.hadoop.ozone.recon.api.types.ContainersResponse; import org.apache.hadoop.ozone.recon.api.types.DeletedContainerInfo; import org.apache.hadoop.ozone.recon.api.types.KeyMetadata; import org.apache.hadoop.ozone.recon.api.types.KeyMetadata.ContainerBlockMetadata; import org.apache.hadoop.ozone.recon.api.types.KeysResponse; import org.apache.hadoop.ozone.recon.api.types.MissingContainerMetadata; import org.apache.hadoop.ozone.recon.api.types.MissingContainersResponse; import org.apache.hadoop.ozone.recon.api.types.UnhealthyContainerMetadata; import org.apache.hadoop.ozone.recon.api.types.UnhealthyContainersResponse; import org.apache.hadoop.ozone.recon.api.types.UnhealthyContainersSummary; import org.apache.hadoop.ozone.recon.persistence.ContainerHealthSchemaManager; import 
org.apache.hadoop.ozone.recon.persistence.ContainerHistory; import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager; import org.apache.hadoop.ozone.recon.scm.ReconContainerManager; import org.apache.hadoop.ozone.recon.spi.ReconContainerMetadataManager; import org.apache.hadoop.ozone.recon.spi.ReconNamespaceSummaryManager; import org.apache.hadoop.ozone.util.SeekableIterator; import org.apache.ozone.recon.schema.ContainerSchemaDefinition.UnHealthyContainerStates; import org.apache.ozone.recon.schema.generated.tables.pojos.UnhealthyContainers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Endpoint for querying keys that belong to a container. */ @Path("/containers") @Produces(MediaType.APPLICATION_JSON) @AdminOnly public class ContainerEndpoint { private ReconContainerMetadataManager reconContainerMetadataManager; private ReconOMMetadataManager omMetadataManager; private final ReconContainerManager containerManager; private final PipelineManager pipelineManager; private final ContainerHealthSchemaManager containerHealthSchemaManager; private final ReconNamespaceSummaryManager reconNamespaceSummaryManager; private final OzoneStorageContainerManager reconSCM; private static final Logger LOG = LoggerFactory.getLogger(ContainerEndpoint.class); private BucketLayout layout = BucketLayout.DEFAULT; /** * Enumeration representing different data filters. * Each filter has an associated value. */ public enum DataFilter { SCM("SCM"), // Filter for SCM OM("OM"); // Filter for OM private final String value; DataFilter(String value) { this.value = value; } public String getValue() { return value; } /** * Convert a String value to the corresponding DataFilter enum constant. * The comparison is case-insensitive. 
*/ public static DataFilter fromValue(String value) { for (DataFilter filter : DataFilter.values()) { if (filter.getValue().equalsIgnoreCase(value)) { return filter; } } throw new IllegalArgumentException("Invalid DataFilter value: " + value); } } @Inject public ContainerEndpoint(OzoneStorageContainerManager reconSCM, ContainerHealthSchemaManager containerHealthSchemaManager, ReconNamespaceSummaryManager reconNamespaceSummaryManager, ReconContainerMetadataManager reconContainerMetadataManager, ReconOMMetadataManager omMetadataManager) { this.containerManager = (ReconContainerManager) reconSCM.getContainerManager(); this.pipelineManager = reconSCM.getPipelineManager(); this.containerHealthSchemaManager = containerHealthSchemaManager; this.reconNamespaceSummaryManager = reconNamespaceSummaryManager; this.reconSCM = reconSCM; this.reconContainerMetadataManager = reconContainerMetadataManager; this.omMetadataManager = omMetadataManager; } /** * Return {@code org.apache.hadoop.hdds.scm.container} * for the containers starting from the given "prev-key" query param for the * given "limit". The given "prev-key" is skipped from the results returned. * * @param prevKey the containerID after which results are returned. * start containerID, &gt;=0, * start searching at the head if 0. * @param limit max no. of containers to get. * count must be &gt;= 0 * Usually the count will be replace with a very big * value instead of being unlimited in case the db is very big. 
* @return {@link Response} */ @GET public Response getContainers( @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT) int limit, @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE) @QueryParam(RECON_QUERY_PREVKEY) long prevKey) { if (limit < 0 || prevKey < 0) { // Send back an empty response return Response.status(Response.Status.NOT_ACCEPTABLE).build(); } long containersCount; List<ContainerMetadata> containerMetaDataList = // Get the containers starting from the prevKey+1 which will skip the // container having prevKey ID containerManager.getContainers(ContainerID.valueOf(prevKey + 1), limit) .stream() .map(container -> { ContainerMetadata containerMetadata = new ContainerMetadata(container.getContainerID()); containerMetadata.setNumberOfKeys(container.getNumberOfKeys()); return containerMetadata; }) .collect(Collectors.toList()); containersCount = containerMetaDataList.size(); // Get the last container ID from the List long lastContainerID = containerMetaDataList.isEmpty() ? prevKey : containerMetaDataList.get(containerMetaDataList.size() - 1) .getContainerID(); ContainersResponse containersResponse = new ContainersResponse(containersCount, containerMetaDataList, lastContainerID); return Response.ok(containersResponse).build(); } /** * Return @{@link org.apache.hadoop.ozone.recon.api.types.KeyMetadata} for * all keys that belong to the container identified by the id param * starting from the given "prev-key" query param for the given "limit". * The given prevKeyPrefix is skipped from the results returned. * * @param containerID the given containerID. * @param limit max no. of keys to get. * @param prevKeyPrefix the key prefix after which results are returned. 
   * @return {@link Response}
   */
  @GET
  @Path("/{id}/keys")
  public Response getKeysForContainer(
      @PathParam("id") Long containerID,
      @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT)
          int limit,
      @DefaultValue(StringUtils.EMPTY) @QueryParam(RECON_QUERY_PREVKEY)
          String prevKeyPrefix) {
    // Keyed by full Ozone key so multiple versions of one key are merged
    // into a single KeyMetadata entry.
    Map<String, KeyMetadata> keyMetadataMap = new LinkedHashMap<>();
    // Total count of keys in the container.
    long totalCount;
    // Last key prefix to be used for pagination. It will be exposed in the response.
    String lastKey = "";
    // If -1 is passed, set limit to the maximum integer value to retrieve all records
    if (limit == -1) {
      limit = Integer.MAX_VALUE;
    }
    try {
      Map<ContainerKeyPrefix, Integer> containerKeyPrefixMap =
          reconContainerMetadataManager.getKeyPrefixesForContainer(containerID,
              prevKeyPrefix, limit);

      // Get set of Container-Key mappings for given containerId.
      for (ContainerKeyPrefix containerKeyPrefix : containerKeyPrefixMap
          .keySet()) {

        // Directly calling getSkipCache() on the Key/FileTable table
        // instead of iterating since only full keys are supported now. We will
        // try to get the OmKeyInfo object by searching the KEY_TABLE table with
        // the key prefix. If it's not found, we will then search the FILE_TABLE
        OmKeyInfo omKeyInfo = omMetadataManager.getKeyTable(BucketLayout.LEGACY)
            .getSkipCache(containerKeyPrefix.getKeyPrefix());

        if (omKeyInfo == null) {
          omKeyInfo =
              omMetadataManager.getKeyTable(BucketLayout.FILE_SYSTEM_OPTIMIZED)
                  .getSkipCache(containerKeyPrefix.getKeyPrefix());
        }

        if (null != omKeyInfo) {
          // Filter keys by version.
          List<OmKeyLocationInfoGroup> matchedKeys = omKeyInfo
              .getKeyLocationVersions()
              .stream()
              .filter(k -> (k.getVersion() ==
                  containerKeyPrefix.getKeyVersion()))
              .collect(Collectors.toList());

          List<ContainerBlockMetadata> blockIds =
              getBlocks(matchedKeys, containerID);

          String ozoneKey = containerKeyPrefix.getKeyPrefix();
          // Track the last key seen for the response's pagination cursor.
          lastKey = ozoneKey;
          if (keyMetadataMap.containsKey(ozoneKey)) {
            // Key already seen under a different version: append the version
            // and its block list to the existing entry.
            keyMetadataMap.get(ozoneKey).getVersions()
                .add(containerKeyPrefix.getKeyVersion());

            keyMetadataMap.get(ozoneKey).getBlockIds()
                .put(containerKeyPrefix.getKeyVersion(), blockIds);
          } else {
            // First time this key is seen: build a fresh metadata entry.
            KeyMetadata keyMetadata = new KeyMetadata();
            keyMetadata.setBucket(omKeyInfo.getBucketName());
            keyMetadata.setVolume(omKeyInfo.getVolumeName());
            keyMetadata.setKey(omKeyInfo.getKeyName());
            keyMetadata.setCompletePath(ReconUtils.constructFullPath(omKeyInfo,
                reconNamespaceSummaryManager));
            keyMetadata.setCreationTime(
                Instant.ofEpochMilli(omKeyInfo.getCreationTime()));
            keyMetadata.setModificationTime(
                Instant.ofEpochMilli(omKeyInfo.getModificationTime()));
            keyMetadata.setDataSize(omKeyInfo.getDataSize());
            keyMetadata.getVersions().add(containerKeyPrefix.getKeyVersion());
            keyMetadataMap.put(ozoneKey, keyMetadata);
            keyMetadata.getBlockIds().put(containerKeyPrefix.getKeyVersion(),
                blockIds);
          }
        }
      }

      totalCount =
          reconContainerMetadataManager.getKeyCountForContainer(containerID);
    } catch (IOException ioEx) {
      throw new WebApplicationException(ioEx,
          Response.Status.INTERNAL_SERVER_ERROR);
    }
    KeysResponse keysResponse =
        new KeysResponse(totalCount, keyMetadataMap.values(), lastKey);
    return Response.ok(keysResponse).build();
  }

  /**
   * Return Container replica history for the container identified by the id
   * param.
   *
   * @param containerID the given containerID.
* @return {@link Response} */ @GET @Path("/{id}/replicaHistory") public Response getReplicaHistoryForContainer( @PathParam("id") Long containerID) { return Response.ok( containerManager.getAllContainerHistory(containerID)).build(); } /** * Return * {@link org.apache.hadoop.ozone.recon.api.types.MissingContainerMetadata} * for all missing containers. * * @param limit The limit of missing containers to return. * @return {@link Response} */ @GET @Path("/missing") @Deprecated public Response getMissingContainers( @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT) int limit ) { List<MissingContainerMetadata> missingContainers = new ArrayList<>(); containerHealthSchemaManager.getUnhealthyContainers( UnHealthyContainerStates.MISSING, 0L, Optional.empty(), limit) .forEach(container -> { long containerID = container.getContainerId(); try { ContainerInfo containerInfo = containerManager.getContainer(ContainerID.valueOf(containerID)); long keyCount = containerInfo.getNumberOfKeys(); UUID pipelineID = containerInfo.getPipelineID().getId(); List<ContainerHistory> datanodes = containerManager.getLatestContainerHistory(containerID, containerInfo.getReplicationConfig().getRequiredNodes()); missingContainers.add(new MissingContainerMetadata(containerID, container.getInStateSince(), keyCount, pipelineID, datanodes)); } catch (IOException ioEx) { throw new WebApplicationException(ioEx, Response.Status.INTERNAL_SERVER_ERROR); } }); MissingContainersResponse response = new MissingContainersResponse(missingContainers.size(), missingContainers); return Response.ok(response).build(); } /** * Return * {@link org.apache.hadoop.ozone.recon.api.types.UnhealthyContainerMetadata} * for all unhealthy containers. * * @param state Return only containers matching the given unhealthy state, * eg UNDER_REPLICATED, MIS_REPLICATED, OVER_REPLICATED or * MISSING. Passing null returns all containers. * @param limit The limit of unhealthy containers to return. 
   * @param maxContainerId Upper bound for container IDs to include (exclusive).
   *        When specified, returns containers with IDs less than this value
   *        in descending order. Use for backward pagination.
   * @param minContainerId Lower bound for container IDs to include (exclusive).
   *        When maxContainerId is not specified, returns containers with IDs
   *        greater than this value in ascending order. Use for forward pagination.
   * @return {@link Response}
   */
  @GET
  @Path("/unhealthy/{state}")
  public Response getUnhealthyContainers(
      @PathParam("state") String state,
      @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT)
          int limit,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_MAX_CONTAINER_ID)
          long maxContainerId,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_MIN_CONTAINER_ID)
          long minContainerId) {
    // A positive maxContainerId switches the query into backward pagination.
    Optional<Long> maxContainerIdOpt =
        maxContainerId > 0 ? Optional.of(maxContainerId) : Optional.empty();
    List<UnhealthyContainerMetadata> unhealthyMeta = new ArrayList<>();
    List<UnhealthyContainersSummary> summary;
    try {
      UnHealthyContainerStates internalState = null;

      if (state != null) {
        // If an invalid state is passed in, this will throw
        // illegalArgumentException and fail the request
        internalState = UnHealthyContainerStates.valueOf(state);
      }

      summary = containerHealthSchemaManager.getUnhealthyContainersSummary();
      List<UnhealthyContainers> containers = containerHealthSchemaManager
          .getUnhealthyContainers(internalState, minContainerId,
              maxContainerIdOpt, limit);

      // Filtering out EMPTY_MISSING and NEGATIVE_SIZE containers from the response.
      // These container states are not being inserted into the database as they represent
      // edge cases that are not critical to track as unhealthy containers.
      // NOTE(review): filtering happens after the limit is applied, so a page
      // may come back smaller than the requested limit.
      List<UnhealthyContainers> filteredContainers = containers.stream()
          .filter(container -> !container.getContainerState()
              .equals(UnHealthyContainerStates.EMPTY_MISSING.toString())
              && !container.getContainerState()
              .equals(UnHealthyContainerStates.NEGATIVE_SIZE.toString()))
          .collect(Collectors.toList());

      // Enrich each record with SCM container details and replica history.
      for (UnhealthyContainers c : filteredContainers) {
        long containerID = c.getContainerId();
        ContainerInfo containerInfo =
            containerManager.getContainer(ContainerID.valueOf(containerID));
        long keyCount = containerInfo.getNumberOfKeys();
        UUID pipelineID = containerInfo.getPipelineID().getId();
        List<ContainerHistory> datanodes =
            containerManager.getLatestContainerHistory(containerID,
                containerInfo.getReplicationConfig().getRequiredNodes());
        unhealthyMeta.add(new UnhealthyContainerMetadata(
            c, datanodes, pipelineID, keyCount));
      }
    } catch (IOException ex) {
      throw new WebApplicationException(ex,
          Response.Status.INTERNAL_SERVER_ERROR);
    } catch (IllegalArgumentException e) {
      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
    }

    UnhealthyContainersResponse response =
        new UnhealthyContainersResponse(unhealthyMeta);
    if (!unhealthyMeta.isEmpty()) {
      // Expose min/max container IDs of this page as pagination cursors.
      response.setFirstKey(unhealthyMeta.stream()
          .map(UnhealthyContainerMetadata::getContainerID)
          .min(Long::compareTo).orElse(0L));
      response.setLastKey(unhealthyMeta.stream()
          .map(UnhealthyContainerMetadata::getContainerID)
          .max(Long::compareTo).orElse(0L));
    }
    for (UnhealthyContainersSummary s : summary) {
      response.setSummaryCount(s.getContainerState(), s.getCount());
    }
    return Response.ok(response).build();
  }

  /**
   * Return
   * {@link org.apache.hadoop.ozone.recon.api.types.UnhealthyContainerMetadata}
   * for all unhealthy containers.
   * @param limit The limit of unhealthy containers to return.
   * @param maxContainerId Upper bound for container IDs to include (exclusive).
   *        When specified, returns containers with IDs less than this value
   *        in descending order. Use for backward pagination.
   * @param minContainerId Lower bound for container IDs to include (exclusive).
   *        When maxContainerId is not specified, returns containers with IDs
   *        greater than this value in ascending order. Use for forward pagination.
   * @return {@link Response}
   */
  @GET
  @Path("/unhealthy")
  public Response getUnhealthyContainers(
      @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT)
          int limit,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_MAX_CONTAINER_ID)
          long maxContainerId,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_MIN_CONTAINER_ID)
          long minContainerId) {
    // Delegates to the state-filtered endpoint with no state filter.
    return getUnhealthyContainers(null, limit, maxContainerId, minContainerId);
  }

  /**
   * This API will return all DELETED containers in SCM in below JSON format.
   * {
   * containers: [
   * {
   * containerId: 1,
   * state: DELETED,
   * pipelineId: "a10ffab6-8ed5-414a-aaf5-79890ff3e8a1",
   * numOfKeys: 3,
   * inStateSince: &lt;stateEnterTime&gt;
   * },
   * {
   * containerId: 2,
   * state: DELETED,
   * pipelineId: "a10ffab6-8ed5-414a-aaf5-79890ff3e8a1",
   * numOfKeys: 6,
   * inStateSince: &lt;stateEnterTime&gt;
   * }
   * ]
   * }
   * @param limit limits the number of deleted containers
   * @param prevKey previous container Id to skip
   * @return Response of deleted containers.
   */
  @GET
  @Path("/deleted")
  public Response getSCMDeletedContainers(
      @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT)
          int limit,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_PREVKEY)
          long prevKey) {
    List<DeletedContainerInfo> deletedContainerInfoList = new ArrayList<>();
    try {
      // Fetch DELETED-state containers starting at prevKey, then drop the
      // prevKey container itself so pagination is exclusive of the cursor.
      List<ContainerInfo> containers =
          containerManager.getContainers(ContainerID.valueOf(prevKey), limit,
              HddsProtos.LifeCycleState.DELETED);
      containers = containers.stream()
          .filter(containerInfo -> !(containerInfo.getContainerID() == prevKey))
          .collect(
              Collectors.toList());
      // Flatten each ContainerInfo into the serialisable response shape.
      containers.forEach(containerInfo -> {
        DeletedContainerInfo deletedContainerInfo = new DeletedContainerInfo();
        deletedContainerInfo.setContainerID(containerInfo.getContainerID());
        deletedContainerInfo.setPipelineID(containerInfo.getPipelineID());
        deletedContainerInfo.setNumberOfKeys(containerInfo.getNumberOfKeys());
        deletedContainerInfo.setContainerState(containerInfo.getState().name());
        deletedContainerInfo.setStateEnterTime(
            containerInfo.getStateEnterTime().toEpochMilli());
        deletedContainerInfo.setLastUsed(
            containerInfo.getLastUsed().toEpochMilli());
        deletedContainerInfo.setUsedBytes(containerInfo.getUsedBytes());
        deletedContainerInfo.setReplicationConfig(
            containerInfo.getReplicationConfig());
        deletedContainerInfo.setReplicationFactor(
            containerInfo.getReplicationFactor().name());
        deletedContainerInfoList.add(deletedContainerInfo);
      });
    } catch (IllegalArgumentException e) {
      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
    } catch (Exception ex) {
      throw new WebApplicationException(ex,
          Response.Status.INTERNAL_SERVER_ERROR);
    }
    return Response.ok(deletedContainerInfoList).build();
  }

  /**
   * Helper function to extract the blocks for a given container from a given
   * OM Key.
   *
   * @param matchedKeys List of OM Key Info locations
   * @param containerID containerId.
   * @return List of blocks.
*/ private List<ContainerBlockMetadata> getBlocks( List<OmKeyLocationInfoGroup> matchedKeys, long containerID) { List<ContainerBlockMetadata> blockIds = new ArrayList<>(); for (OmKeyLocationInfoGroup omKeyLocationInfoGroup : matchedKeys) { List<OmKeyLocationInfo> omKeyLocationInfos = omKeyLocationInfoGroup .getLocationList() .stream() .filter(c -> c.getContainerID() == containerID) .collect(Collectors.toList()); for (OmKeyLocationInfo omKeyLocationInfo : omKeyLocationInfos) { blockIds.add(new ContainerBlockMetadata(omKeyLocationInfo .getContainerID(), omKeyLocationInfo.getLocalID())); } } return blockIds; } /** * Retrieves the container mismatch insights. * * This method returns a list of ContainerDiscrepancyInfo objects representing * the containers that are missing in either the Ozone Manager (OM) or the * Storage Container Manager (SCM), based on the provided filter parameter. * The returned list is paginated based on the provided limit and prevKey * parameters. * * @param limit The maximum number of container discrepancies to return. * @param prevKey The container ID after which the results are returned. * @param missingIn The missing filter parameter to specify if it's * "OM" or "SCM" missing containers to be returned. 
*/ @GET @Path("/mismatch") public Response getContainerMisMatchInsights( @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT) int limit, @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE) @QueryParam(RECON_QUERY_PREVKEY) long prevKey, @DefaultValue(DEFAULT_FILTER_FOR_MISSING_CONTAINERS) @QueryParam(RECON_QUERY_FILTER) String missingIn) { if (prevKey < 0 || limit < 0) { // Send back an empty response return Response.status(Response.Status.NOT_ACCEPTABLE).build(); } List<ContainerDiscrepancyInfo> containerDiscrepancyInfoList = new ArrayList<>(); Long minContainerID = prevKey + 1; Iterator<ContainerInfo> scmNonDeletedContainers = containerManager.getContainers().stream() .filter(containerInfo -> (containerInfo.getContainerID() >= minContainerID)) .filter(containerInfo -> containerInfo.getState() != HddsProtos.LifeCycleState.DELETED) .sorted(Comparator.comparingLong(ContainerInfo::getContainerID)).iterator(); ContainerInfo scmContainerInfo = scmNonDeletedContainers.hasNext() ? scmNonDeletedContainers.next() : null; DataFilter dataFilter = DataFilter.fromValue(missingIn.toUpperCase()); try (SeekableIterator<Long, ContainerMetadata> omContainers = reconContainerMetadataManager.getContainersIterator()) { omContainers.seek(minContainerID); ContainerMetadata containerMetadata = omContainers.hasNext() ? omContainers.next() : null; switch (dataFilter) { case SCM: List<ContainerMetadata> notSCMContainers = new ArrayList<>(); while (containerMetadata != null && notSCMContainers.size() < limit) { Long omContainerID = containerMetadata.getContainerID(); Long scmContainerID = scmContainerInfo == null ? null : scmContainerInfo.getContainerID(); if (omContainerID.equals(scmContainerID)) { containerMetadata = omContainers.hasNext() ? omContainers.next() : null; scmContainerInfo = scmNonDeletedContainers.hasNext() ? 
scmNonDeletedContainers.next() : null; } else if (scmContainerID == null || omContainerID.compareTo(scmContainerID) < 0) { notSCMContainers.add(containerMetadata); containerMetadata = omContainers.hasNext() ? omContainers.next() : null; } else { scmContainerInfo = scmNonDeletedContainers.hasNext() ? scmNonDeletedContainers.next() : null; } } notSCMContainers.forEach(nonSCMContainer -> { ContainerDiscrepancyInfo containerDiscrepancyInfo = new ContainerDiscrepancyInfo(); containerDiscrepancyInfo.setContainerID(nonSCMContainer.getContainerID()); containerDiscrepancyInfo.setNumberOfKeys( nonSCMContainer.getNumberOfKeys()); containerDiscrepancyInfo.setPipelines( nonSCMContainer.getPipelines()); containerDiscrepancyInfo.setExistsAt("OM"); containerDiscrepancyInfoList.add(containerDiscrepancyInfo); }); break; case OM: List<ContainerInfo> nonOMContainers = new ArrayList<>(); while (scmContainerInfo != null && nonOMContainers.size() < limit) { Long omContainerID = containerMetadata == null ? null : containerMetadata.getContainerID(); Long scmContainerID = scmContainerInfo.getContainerID(); if (scmContainerID.equals(omContainerID)) { scmContainerInfo = scmNonDeletedContainers.hasNext() ? scmNonDeletedContainers.next() : null; containerMetadata = omContainers.hasNext() ? omContainers.next() : null; } else if (omContainerID == null || scmContainerID.compareTo(omContainerID) < 0) { nonOMContainers.add(scmContainerInfo); scmContainerInfo = scmNonDeletedContainers.hasNext() ? scmNonDeletedContainers.next() : null; } else { //Seeking directly to SCM containerId sequential read is just wasteful here if there are too many values // to be read in b/w omContainerID & scmContainerID since (omContainerId<scmContainerID) omContainers.seek(scmContainerID); containerMetadata = omContainers.hasNext() ? 
omContainers.next() : null; } } List<Pipeline> pipelines = new ArrayList<>(); nonOMContainers.forEach(containerInfo -> { ContainerDiscrepancyInfo containerDiscrepancyInfo = new ContainerDiscrepancyInfo(); containerDiscrepancyInfo.setContainerID(containerInfo.getContainerID()); containerDiscrepancyInfo.setNumberOfKeys(0); PipelineID pipelineID = null; try { pipelineID = containerInfo.getPipelineID(); if (pipelineID != null) { pipelines.add(pipelineManager.getPipeline(pipelineID)); } } catch (PipelineNotFoundException e) { LOG.debug( "Pipeline not found for container: {} and pipelineId: {}", containerInfo, pipelineID, e); } containerDiscrepancyInfo.setPipelines(pipelines); containerDiscrepancyInfo.setExistsAt("SCM"); containerDiscrepancyInfoList.add(containerDiscrepancyInfo); }); break; default: // Invalid filter parameter value return Response.status(Response.Status.BAD_REQUEST).build(); } } catch (IllegalArgumentException e) { throw new WebApplicationException(e, Response.Status.BAD_REQUEST); } catch (Exception ex) { throw new WebApplicationException(ex, Response.Status.INTERNAL_SERVER_ERROR); } Map<String, Object> response = new HashMap<>(); if (!containerDiscrepancyInfoList.isEmpty()) { response.put("lastKey", containerDiscrepancyInfoList.get( containerDiscrepancyInfoList.size() - 1).getContainerID()); } else { response.put("lastKey", null); } response.put("containerDiscrepancyInfo", containerDiscrepancyInfoList); return Response.ok(response).build(); } /** * This API retrieves set of deleted containers in SCM which are present * in OM to find out list of keys mapped to such DELETED state containers. * * limit - limits the number of such SCM DELETED containers present in OM. * prevKey - Skip containers till it seeks correctly to the previous * containerId. 
   * Sample API Response:
   * [
   * {
   * "containerId": 2,
   * "numberOfKeys": 2,
   * "pipelines": []
   * }
   * ]
   */
  @GET
  @Path("/mismatch/deleted")
  public Response getOmContainersDeletedInSCM(
      @DefaultValue(DEFAULT_FETCH_COUNT) @QueryParam(RECON_QUERY_LIMIT)
          int limit,
      @DefaultValue(PREV_CONTAINER_ID_DEFAULT_VALUE)
      @QueryParam(RECON_QUERY_PREVKEY)
          long prevKey) {
    if (prevKey < 0) {
      // Send back an empty response
      return Response.status(Response.Status.NOT_ACCEPTABLE).build();
    }
    // Non-positive limit means "no page size cap".
    if (limit <= 0) {
      limit = Integer.MAX_VALUE;
    }
    long minContainerID = prevKey + 1;
    // Sorted iterator over SCM containers already in DELETED state,
    // starting at the pagination cursor.
    Iterator<ContainerInfo> deletedStateSCMContainers =
        containerManager.getContainers().stream()
            .filter(containerInfo ->
                containerInfo.getContainerID() >= minContainerID)
            .filter(containerInfo ->
                containerInfo.getState() == HddsProtos.LifeCycleState.DELETED)
            .sorted(Comparator.comparingLong(ContainerInfo::getContainerID))
            .iterator();
    List<ContainerDiscrepancyInfo> containerDiscrepancyInfoList;
    try (SeekableIterator<Long, ContainerMetadata> omContainers =
             reconContainerMetadataManager.getContainersIterator()) {
      ContainerInfo scmContainerInfo = deletedStateSCMContainers.hasNext() ?
          deletedStateSCMContainers.next() : null;
      ContainerMetadata containerMetadata =
          omContainers.hasNext() ? omContainers.next() : null;
      List<ContainerMetadata> omContainersDeletedInSCM = new ArrayList<>();

      // Merge-walk both sorted streams; an ID present on both sides means
      // OM still tracks a container SCM has already deleted.
      while (containerMetadata != null && scmContainerInfo != null
          && omContainersDeletedInSCM.size() < limit) {
        Long omContainerID = containerMetadata.getContainerID();
        Long scmContainerID = scmContainerInfo.getContainerID();
        if (scmContainerID.equals(omContainerID)) {
          omContainersDeletedInSCM.add(containerMetadata);
          scmContainerInfo = deletedStateSCMContainers.hasNext() ?
              deletedStateSCMContainers.next() : null;
          containerMetadata = omContainers.hasNext() ?
              omContainers.next() : null;
        } else if (scmContainerID.compareTo(omContainerID) < 0) {
          scmContainerInfo = deletedStateSCMContainers.hasNext() ?
              deletedStateSCMContainers.next() : null;
        } else {
          // Seek directly to scmContainerId iterating sequentially is very wasteful here.
          omContainers.seek(scmContainerID);
          containerMetadata = omContainers.hasNext() ?
              omContainers.next() : null;
        }
      }
      // Map matched OM metadata into the serialisable discrepancy shape.
      containerDiscrepancyInfoList =
          omContainersDeletedInSCM.stream().map(containerMetadataEntry -> {
            ContainerDiscrepancyInfo containerDiscrepancyInfo =
                new ContainerDiscrepancyInfo();
            containerDiscrepancyInfo.setContainerID(
                containerMetadataEntry.getContainerID());
            containerDiscrepancyInfo.setNumberOfKeys(
                containerMetadataEntry.getNumberOfKeys());
            containerDiscrepancyInfo.setPipelines(
                containerMetadataEntry.getPipelines());
            return containerDiscrepancyInfo;
          }).collect(Collectors.toList());
    } catch (IllegalArgumentException e) {
      throw new WebApplicationException(e, Response.Status.BAD_REQUEST);
    } catch (Exception ex) {
      throw new WebApplicationException(ex,
          Response.Status.INTERNAL_SERVER_ERROR);
    }

    Map<String, Object> response = new HashMap<>();
    if (!containerDiscrepancyInfoList.isEmpty()) {
      // Highest container ID on this page doubles as the next prevKey.
      response.put("lastKey", containerDiscrepancyInfoList.get(
          containerDiscrepancyInfoList.size() - 1).getContainerID());
    } else {
      response.put("lastKey", null);
    }
    response.put("containerDiscrepancyInfo", containerDiscrepancyInfoList);
    return Response.ok(response).build();
  }
}
apache/fineract
35,725
fineract-core/src/main/java/org/apache/fineract/portfolio/savings/data/SavingsAccountTransactionData.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.fineract.portfolio.savings.data;

import java.io.Serializable;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import lombok.Getter;
import org.apache.fineract.infrastructure.core.domain.LocalDateInterval;
import org.apache.fineract.infrastructure.core.jersey.serializer.legacy.JsonLocalDateArrayFormat;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.organisation.monetary.data.CurrencyData;
import org.apache.fineract.organisation.monetary.domain.MonetaryCurrency;
import org.apache.fineract.organisation.monetary.domain.Money;
import org.apache.fineract.portfolio.TransactionEntryType;
import org.apache.fineract.portfolio.account.data.AccountTransferData;
import org.apache.fineract.portfolio.paymentdetail.data.PaymentDetailData;
import org.apache.fineract.portfolio.paymenttype.data.PaymentTypeData;
import org.apache.fineract.portfolio.savings.SavingsAccountTransactionType;
import org.apache.fineract.portfolio.savings.domain.SavingsAccountChargesPaidByData;
import org.apache.fineract.portfolio.savings.domain.interest.EndOfDayBalance;
import org.apache.fineract.portfolio.savings.service.SavingsEnumerations;
import org.apache.fineract.portfolio.tax.data.TaxComponentData;
import org.apache.fineract.portfolio.tax.data.TaxDetailsData;
import org.springframework.util.CollectionUtils;

/**
 * Immutable data object representing a savings account transaction.
 *
 * NOTE(review): despite the "immutable" claim, several fields (id, note,
 * runningBalance, reversed, import fields) are non-final and are mutated by
 * the static factory methods below.
 */
@Getter
@JsonLocalDateArrayFormat
public final class SavingsAccountTransactionData implements Serializable {

    // Core transaction state, populated by the private constructor.
    private Long id;
    private final SavingsAccountTransactionEnumData transactionType;
    // Debit/credit classification; flipped when the transaction is a reversal.
    private final TransactionEntryType entryType;
    private final Long accountId;
    private final String accountNo;
    private final LocalDate date;
    private final CurrencyData currency;
    private final PaymentDetailData paymentDetailData;
    private final BigDecimal amount;
    private final BigDecimal outstandingChargeAmount;
    private BigDecimal runningBalance;
    private boolean reversed;
    private final AccountTransferData transfer;
    private final LocalDate submittedOnDate;
    private final boolean interestedPostedAsOn;
    private final String submittedByUsername;
    private String note;
    private final boolean isManualTransaction;
    private final Boolean isReversal;
    private final Long originalTransactionId;
    private final Boolean lienTransaction;
    private final Long releaseTransactionId;
    private final String reasonForBlock;
    private final Set<SavingsAccountChargesPaidByData> chargesPaidByData = new HashSet<>();

    // templates
    private final Collection<PaymentTypeData> paymentTypeOptions;

    // import fields
    private transient Integer rowIndex;
    private transient Long savingsAccountId;
    private String dateFormat;
    private String locale;
    private LocalDate transactionDate;
    private BigDecimal transactionAmount;
    private Long paymentTypeId;
    private String accountNumber;
    private String checkNumber;
    private String routingCode;
    private String receiptNumber;
    private String bankNumber;
    private BigDecimal cumulativeBalance;
    private LocalDate balanceEndDate;
    private final transient List<TaxDetailsData> taxDetails = new ArrayList<>();
    private Integer balanceNumberOfDays;
    private BigDecimal overdraftAmount;
    private transient Long modifiedId;
    private transient String refNo;
    private Boolean isOverdraft;
    private Long accountCredit;
    private Long accountDebit;

    /**
     * Sole constructor; every public entry point funnels through one of the
     * static factory methods below, which decide which of these positional
     * arguments are populated and which are left null/false.
     */
    private SavingsAccountTransactionData(final Long id, final SavingsAccountTransactionEnumData transactionType,
            final PaymentDetailData paymentDetailData, final Long savingsId, final String savingsAccountNo,
            final LocalDate transactionDate, final CurrencyData currency, final BigDecimal amount,
            final BigDecimal outstandingChargeAmount, final BigDecimal runningBalance, final boolean reversed,
            final AccountTransferData transfer, final Collection<PaymentTypeData> paymentTypeOptions,
            final LocalDate submittedOnDate, final boolean interestedPostedAsOn, final String submittedByUsername, final String note,
            final Boolean isReversal, final Long originalTransactionId, boolean isManualTransaction, final Boolean lienTransaction,
            final Long releaseTransactionId, final String reasonForBlock, final Boolean isOverdraft) {
        this.id = id;
        this.transactionType = transactionType;
        // Derive the debit/credit entry type from the transaction type,
        // flipping it when this transaction represents a reversal.
        TransactionEntryType entryType = null;
        if (transactionType != null) {
            entryType = transactionType.getEntryType();
            entryType = entryType != null && Boolean.TRUE.equals(isReversal) ? entryType.getReversal() : entryType;
        }
        this.entryType = entryType;
        // duplicated fields
        this.accountId = savingsId;
        this.accountNo = savingsAccountNo;
        this.date = transactionDate;
        this.amount = amount;
        this.paymentDetailData = paymentDetailData;
        this.currency = currency;
        this.outstandingChargeAmount = outstandingChargeAmount;
        this.runningBalance = runningBalance;
        this.reversed = reversed;
        this.transfer = transfer;
        this.paymentTypeOptions = paymentTypeOptions;
        this.submittedOnDate = submittedOnDate;
        this.interestedPostedAsOn = interestedPostedAsOn;
        this.submittedByUsername = submittedByUsername;
        this.note = note;
        this.isManualTransaction = isManualTransaction;
        this.isReversal = isReversal;
        this.originalTransactionId = originalTransactionId;
        this.lienTransaction = lienTransaction;
        this.releaseTransactionId = releaseTransactionId;
        this.reasonForBlock = reasonForBlock;
        this.isOverdraft = isOverdraft;
    }

    /**
     * Internal factory: fills in the reversal/original-transaction/overdraft
     * slots with defaults (null / false) on top of the full constructor.
     */
    private static SavingsAccountTransactionData createData(final Long id, final SavingsAccountTransactionEnumData transactionType,
            final PaymentDetailData paymentDetailData, final Long accountId, final String accountNo, final LocalDate date,
            final CurrencyData currency, final BigDecimal amount, final BigDecimal outstandingChargeAmount,
            final BigDecimal runningBalance, final boolean reversed, final AccountTransferData transfer,
            final Collection<PaymentTypeData> paymentTypeOptions, final LocalDate submittedOnDate, final boolean interestedPostedAsOn,
            final String submittedByUsername, final String note, final Boolean lienTransaction) {
        return new SavingsAccountTransactionData(id, transactionType, paymentDetailData, accountId, accountNo, date, currency, amount,
                outstandingChargeAmount, runningBalance, reversed, transfer, paymentTypeOptions, submittedOnDate, interestedPostedAsOn,
                submittedByUsername, note, null, null, false, lienTransaction, null, null, false);
    }

    public static SavingsAccountTransactionData create(final Long id, final SavingsAccountTransactionEnumData
            transactionType,
            final PaymentDetailData paymentDetailData, final Long savingsId, final String savingsAccountNo, final LocalDate date,
            final CurrencyData currency, final BigDecimal amount, final BigDecimal outstandingChargeAmount,
            final BigDecimal runningBalance, final boolean reversed, final AccountTransferData transfer, final LocalDate submittedOnDate,
            final boolean interestedPostedAsOn, final String submittedByUsername, final String note, final Boolean isReversal,
            final Long originalTransactionId, final Boolean lienTransaction, final Long releaseTransactionId,
            final String reasonForBlock) {
        // Full-detail factory: the only public creator that carries reversal
        // and release/block information through to the constructor.
        return new SavingsAccountTransactionData(id, transactionType, paymentDetailData, savingsId, savingsAccountNo, date, currency,
                amount, outstandingChargeAmount, runningBalance, reversed, transfer, null, submittedOnDate, interestedPostedAsOn,
                submittedByUsername, note, isReversal, originalTransactionId, false, lienTransaction, releaseTransactionId,
                reasonForBlock, false);
    }

    /**
     * Factory without reversal/lien details; delegates to createData with
     * lienTransaction = false.
     */
    public static SavingsAccountTransactionData create(final Long id, final SavingsAccountTransactionEnumData transactionType,
            final PaymentDetailData paymentDetailData, final Long savingsId, final String savingsAccountNo, final LocalDate date,
            final CurrencyData currency, final BigDecimal amount, final BigDecimal outstandingChargeAmount,
            final BigDecimal runningBalance, final boolean reversed, final AccountTransferData transfer,
            final boolean interestedPostedAsOn, final String submittedByUsername, final String note, final LocalDate submittedOnDate) {
        return createData(id, transactionType, paymentDetailData, savingsId, savingsAccountNo, date, currency, amount,
                outstandingChargeAmount, runningBalance, reversed, transfer, null, submittedOnDate, interestedPostedAsOn,
                submittedByUsername, note, false);
    }

    /**
     * Factory used for interest-posting style rows: additionally records the
     * cumulative balance and the end date of the balance period.
     */
    public static SavingsAccountTransactionData create(final Long id, final SavingsAccountTransactionEnumData transactionType,
            final PaymentDetailData paymentDetailData, final Long savingsId, final String savingsAccountNo, final LocalDate date,
            final CurrencyData currency, final BigDecimal amount, final BigDecimal outstandingChargeAmount,
            final BigDecimal runningBalance, final boolean reversed, final LocalDate submittedOnDate,
            final boolean interestedPostedAsOn, final BigDecimal cumulativeBalance, final LocalDate balanceEndDate) {
        SavingsAccountTransactionData data = createData(id, transactionType, paymentDetailData, savingsId, savingsAccountNo, date,
                currency, amount, outstandingChargeAmount, runningBalance, reversed, null, null, submittedOnDate, interestedPostedAsOn,
                null, null, false);
        data.transactionDate = date;
        data.cumulativeBalance = cumulativeBalance;
        data.balanceEndDate = balanceEndDate;
        return data;
    }

    /**
     * Minimal factory carrying only the transaction id; every other slot is
     * null/false.
     */
    public static SavingsAccountTransactionData create(final Long id) {
        return createData(id, null, null, null, null, null, null, null, null, null, false, null, null, null, false, null, null, false);
    }

    /**
     * Copies the given transaction as a WITHDRAWAL dated to the current
     * business date; all other details are carried over unchanged.
     */
    public static SavingsAccountTransactionData withWithDrawalTransactionDetails(
            final SavingsAccountTransactionData savingsAccountTransactionData) {

        final LocalDate currentDate = DateUtils.getBusinessLocalDate();
        final SavingsAccountTransactionEnumData transactionType = SavingsEnumerations
                .transactionType(SavingsAccountTransactionType.WITHDRAWAL.getValue());

        return createData(savingsAccountTransactionData.getId(), transactionType,
                savingsAccountTransactionData.getPaymentDetailData(), savingsAccountTransactionData.getAccountId(),
                savingsAccountTransactionData.getAccountNo(), currentDate, savingsAccountTransactionData.getCurrency(),
                savingsAccountTransactionData.getAmount(), savingsAccountTransactionData.getOutstandingChargeAmount(),
                savingsAccountTransactionData.getRunningBalance(), savingsAccountTransactionData.isReversed(),
                savingsAccountTransactionData.getTransfer(), savingsAccountTransactionData.getPaymentTypeOptions(),
                savingsAccountTransactionData.getSubmittedOnDate(), savingsAccountTransactionData.isInterestedPostedAsOn(),
                savingsAccountTransactionData.getSubmittedByUsername(), savingsAccountTransactionData.getNote(),
                savingsAccountTransactionData.getLienTransaction());
    }

    /**
     * Template factory used by the UI to prefill a new-transaction form.
     */
    public static SavingsAccountTransactionData template(final Long savingsId, final String savingsAccountNo,
            final LocalDate defaultLocalDate, final CurrencyData currency) {
        return createData(null, null, null, savingsId, savingsAccountNo, defaultLocalDate, currency, null, null, null, false, null, null,
                defaultLocalDate, false, null, null, false);
    }

    /**
     * Re-creates the given transaction with the supplied payment type options
     * attached (used to enrich a template response).
     */
    public static SavingsAccountTransactionData templateOnTop(final SavingsAccountTransactionData savingsAccountTransactionData,
            final Collection<PaymentTypeData> paymentTypeOptions) {
        return createData(savingsAccountTransactionData.getId(), savingsAccountTransactionData.getTransactionType(),
                savingsAccountTransactionData.getPaymentDetailData(), savingsAccountTransactionData.getAccountId(),
                savingsAccountTransactionData.getAccountNo(), savingsAccountTransactionData.getDate(),
                savingsAccountTransactionData.getCurrency(), savingsAccountTransactionData.getAmount(),
                savingsAccountTransactionData.getOutstandingChargeAmount(), savingsAccountTransactionData.getRunningBalance(),
                savingsAccountTransactionData.isReversed(), savingsAccountTransactionData.getTransfer(), paymentTypeOptions,
                savingsAccountTransactionData.getSubmittedOnDate(), savingsAccountTransactionData.isInterestedPostedAsOn(),
                savingsAccountTransactionData.getSubmittedByUsername(), savingsAccountTransactionData.getNote(),
                savingsAccountTransactionData.getLienTransaction());
    }

    /**
     * Import factory: builds the transaction and duplicates the key values
     * into the transient bulk-import fields.
     */
    private static SavingsAccountTransactionData createImport(final SavingsAccountTransactionEnumData transactionType,
            final PaymentDetailData paymentDetailData, final Long savingsAccountId, final String accountNumber,
            final LocalDate transactionDate, final BigDecimal transactionAmount, final boolean reversed,
            final LocalDate submittedOnDate, boolean isManualTransaction, final Boolean lienTransaction, final Boolean isOverdraft) {
SavingsAccountTransactionData data = new SavingsAccountTransactionData(null, transactionType, paymentDetailData, savingsAccountId, accountNumber, transactionDate, null, transactionAmount, null, null, reversed, null, null, submittedOnDate, false, null, null, null, null, isManualTransaction, lienTransaction, null, null, isOverdraft); // duplicated import fields data.savingsAccountId = savingsAccountId; data.accountNumber = accountNumber; data.transactionDate = transactionDate; data.transactionAmount = transactionAmount; return data; } public static SavingsAccountTransactionData copyTransaction(SavingsAccountTransactionData accountTransaction) { return createImport(accountTransaction.getTransactionType(), accountTransaction.getPaymentDetailData(), accountTransaction.getSavingsAccountId(), null, accountTransaction.getTransactionDate(), accountTransaction.getAmount(), accountTransaction.isReversed(), accountTransaction.getSubmittedOnDate(), accountTransaction.isManualTransaction(), accountTransaction.getLienTransaction(), false); } public static SavingsAccountTransactionData importInstance(BigDecimal transactionAmount, LocalDate transactionDate, Long paymentTypeId, String accountNumber, String checkNumber, String routingCode, String receiptNumber, String bankNumber, String note, Long savingsAccountId, SavingsAccountTransactionEnumData transactionType, Integer rowIndex, String locale, String dateFormat) { SavingsAccountTransactionData data = createImport(transactionType, null, savingsAccountId, accountNumber, transactionDate, transactionAmount, false, transactionDate, false, false, false); data.rowIndex = rowIndex; data.paymentTypeId = paymentTypeId; data.checkNumber = checkNumber; data.routingCode = routingCode; data.receiptNumber = receiptNumber; data.bankNumber = bankNumber; data.note = note; data.locale = locale; data.dateFormat = dateFormat; return data; } private static SavingsAccountTransactionData createImport(SavingsAccountTransactionEnumData transactionType, 
Long savingsAccountId, LocalDate transactionDate, BigDecimal transactionAmount, final LocalDate submittedOnDate, boolean isManualTransaction, Boolean isOverdraft) { // import transaction return createImport(transactionType, null, savingsAccountId, null, transactionDate, transactionAmount, false, submittedOnDate, isManualTransaction, false, isOverdraft); } public static SavingsAccountTransactionData interestPosting(final SavingsAccountData savingsAccount, final LocalDate date, final Money amount, final boolean isManualTransaction) { final LocalDate submittedOnDate = DateUtils.getBusinessLocalDate(); final SavingsAccountTransactionType savingsAccountTransactionType = SavingsAccountTransactionType.INTEREST_POSTING; SavingsAccountTransactionEnumData transactionType = new SavingsAccountTransactionEnumData( savingsAccountTransactionType.getValue().longValue(), savingsAccountTransactionType.getCode(), savingsAccountTransactionType.getValue().toString()); return createImport(transactionType, savingsAccount.getId(), date, amount.getAmount(), submittedOnDate, isManualTransaction, false); } public static SavingsAccountTransactionData accrual(final SavingsAccountData savingsAccount, final LocalDate date, final Money amount, final boolean isManualTransaction) { final LocalDate submittedOnDate = DateUtils.getBusinessLocalDate(); final SavingsAccountTransactionType savingsAccountTransactionType = SavingsAccountTransactionType.ACCRUAL; SavingsAccountTransactionEnumData transactionType = new SavingsAccountTransactionEnumData( savingsAccountTransactionType.getValue().longValue(), savingsAccountTransactionType.getCode(), savingsAccountTransactionType.getValue().toString()); return createImport(transactionType, savingsAccount.getId(), date, amount.getAmount(), submittedOnDate, isManualTransaction, false); } public static SavingsAccountTransactionData overdraftInterest(final SavingsAccountData savingsAccount, final LocalDate date, final Money amount, final boolean isManualTransaction, 
final Boolean isOverdraft) { final LocalDate submittedOnDate = DateUtils.getBusinessLocalDate(); final SavingsAccountTransactionType savingsAccountTransactionType = SavingsAccountTransactionType.OVERDRAFT_INTEREST; SavingsAccountTransactionEnumData transactionType = new SavingsAccountTransactionEnumData( savingsAccountTransactionType.getValue().longValue(), savingsAccountTransactionType.getCode(), savingsAccountTransactionType.getValue().toString()); return createImport(transactionType, savingsAccount.getId(), date, amount.getAmount(), submittedOnDate, isManualTransaction, isOverdraft); } public static SavingsAccountTransactionData withHoldTax(final SavingsAccountData savingsAccount, final LocalDate date, final Money amount, final Map<TaxComponentData, BigDecimal> taxDetails) { final LocalDate submittedOnDate = DateUtils.getBusinessLocalDate(); SavingsAccountTransactionType savingsAccountTransactionType = SavingsAccountTransactionType.WITHHOLD_TAX; SavingsAccountTransactionEnumData transactionType = new SavingsAccountTransactionEnumData( savingsAccountTransactionType.getValue().longValue(), savingsAccountTransactionType.getCode(), savingsAccountTransactionType.getValue().toString()); SavingsAccountTransactionData accountTransaction = createImport(transactionType, savingsAccount.getId(), date, amount.getAmount(), submittedOnDate, false, false); accountTransaction.addTaxDetails(taxDetails); return accountTransaction; } public boolean isInterestPostingAndNotReversed() { return this.transactionType.isInterestPosting() && isNotReversed(); } public void setTaxDetails(final TaxDetailsData taxDetails) { this.taxDetails.add(taxDetails); } public boolean isOverdraftInterestAndNotReversed() { return this.transactionType.isIncomeFromInterest() && isNotReversed(); } public boolean isCredit() { return transactionType.isCredit() && isNotReversed() && !isReversalTransaction(); } public boolean isDebit() { return transactionType.isDebit() && isNotReversed() && 
!isReversalTransaction(); } public boolean isWithdrawalFeeAndNotReversed() { return transactionType.isFeeDeduction() && isNotReversed(); } public boolean isPayCharge() { return this.transactionType.isPayCharge(); } public void updateRunningBalance(final Money balance) { this.runningBalance = balance.getAmount(); } public void updateOverdraftAmount(BigDecimal overdraftAmount) { this.overdraftAmount = overdraftAmount; } public boolean isAmountOnHold() { return this.transactionType.isAmountHold(); } public boolean isAnnualFeeAndNotReversed() { return isAnnualFee() && isNotReversed(); } public boolean isAnnualFee() { return this.transactionType.isAnnualFee(); } public Money getRunningBalance(final CurrencyData currency) { return Money.of(currency, this.runningBalance); } public Money getRunningBalance(final MonetaryCurrency currency) { return Money.of(currency, this.runningBalance); } public boolean isDepositAndNotReversed() { return this.transactionType.isDeposit() && isNotReversed(); } public boolean isDividendPayoutAndNotReversed() { return this.transactionType.isDividendPayout() && isNotReversed(); } public void setRefNo(final String uuid) { this.refNo = uuid; } public void setBalanceNumberOfDays(final Integer balanceNumberOfDays) { this.balanceNumberOfDays = balanceNumberOfDays; } public EndOfDayBalance toEndOfDayBalance(final Money openingBalance) { final MonetaryCurrency currency = openingBalance.getCurrency(); Money endOfDayBalance = openingBalance.copy(); if (isDeposit() || isDividendPayoutAndNotReversed()) { endOfDayBalance = Money.of(currency, this.runningBalance); } else if (isWithdrawal() || isChargeTransactionAndNotReversed()) { if (isWithdrawal()) { endOfDayBalance = Money.of(currency, this.runningBalance); } else if (openingBalance.isGreaterThanZero()) { endOfDayBalance = openingBalance.minus(getAmount()); } else { endOfDayBalance = Money.of(currency, this.runningBalance); } } return EndOfDayBalance.from(getTransactionDate(), openingBalance, 
endOfDayBalance, this.balanceNumberOfDays); } public EndOfDayBalance toEndOfDayBalanceDates(final Money openingBalance, LocalDateInterval date) { final MonetaryCurrency currency = openingBalance.getCurrency(); Money endOfDayBalance = Money.of(currency, this.runningBalance); return EndOfDayBalance.from(getTransactionDate(), openingBalance, endOfDayBalance, this.balanceNumberOfDays != null ? this.balanceNumberOfDays : date.endDate().getDayOfMonth()); } public boolean isChargeTransactionAndNotReversed() { return this.transactionType.isChargeTransaction() && isNotReversed(); } public boolean occursOn(final LocalDate occursOnDate) { return DateUtils.isEqual(occursOnDate, getTransactionDate()); } public EndOfDayBalance toEndOfDayBalanceBoundedBy(final Money openingBalance, final LocalDateInterval boundedBy, final boolean isAllowOverdraft) { final MonetaryCurrency currency = openingBalance.getCurrency(); Money endOfDayBalance = openingBalance.copy(); int numberOfDaysOfBalance = this.balanceNumberOfDays; LocalDate balanceStartDate = getTransactionDate(); LocalDate balanceEndDate = getEndOfBalanceLocalDate(); if (DateUtils.isAfter(boundedBy.startDate(), balanceStartDate)) { balanceStartDate = boundedBy.startDate(); final LocalDateInterval spanOfBalance = LocalDateInterval.create(balanceStartDate, balanceEndDate); numberOfDaysOfBalance = spanOfBalance.daysInPeriodInclusiveOfEndDate(); } else { if (isDeposit() || isDividendPayoutAndNotReversed()) { endOfDayBalance = endOfDayBalance.plus(getAmount()); } else if (isWithdrawal() || isChargeTransactionAndNotReversed()) { if (endOfDayBalance.isLessThanZero() && isAllowOverdraft) { endOfDayBalance = Money.of(currency, this.runningBalance); } else if (endOfDayBalance.isGreaterThanZero() || isAllowOverdraft) { endOfDayBalance = endOfDayBalance.minus(getAmount()); } else { endOfDayBalance = Money.of(currency, this.runningBalance); } } } if (DateUtils.isAfter(balanceEndDate, boundedBy.endDate())) { balanceEndDate = boundedBy.endDate(); 
final LocalDateInterval spanOfBalance = LocalDateInterval.create(balanceStartDate, balanceEndDate); numberOfDaysOfBalance = spanOfBalance.daysInPeriodInclusiveOfEndDate(); } return EndOfDayBalance.from(balanceStartDate, openingBalance, endOfDayBalance, numberOfDaysOfBalance); } public void reverse() { this.reversed = true; } public boolean fallsWithin(final LocalDateInterval periodInterval) { final LocalDateInterval balanceInterval = LocalDateInterval.create(getTransactionDate(), getEndOfBalanceLocalDate()); return periodInterval.contains(balanceInterval); } public LocalDate getEndOfBalanceLocalDate() { return this.balanceEndDate == null ? null : this.balanceEndDate; } public void zeroBalanceFields() { this.runningBalance = null; this.cumulativeBalance = null; this.balanceEndDate = null; this.balanceNumberOfDays = null; } public boolean isAmountRelease() { return this.transactionType.isAmountRelease(); } public boolean isDeposit() { return this.transactionType.isDeposit(); } public boolean isChargeTransaction() { return this.transactionType.isChargeTransaction(); } public Set<SavingsAccountChargesPaidByData> getSavingsAccountChargesPaid() { return this.chargesPaidByData; } public void updateCumulativeBalanceAndDates(final MonetaryCurrency currency, final LocalDate endOfBalanceDate) { // balance end date should not be before transaction date if (endOfBalanceDate != null && DateUtils.isBefore(endOfBalanceDate, this.transactionDate)) { this.balanceEndDate = this.transactionDate; } else if (endOfBalanceDate != null) { this.balanceEndDate = endOfBalanceDate; } else { this.balanceEndDate = null; } this.balanceNumberOfDays = LocalDateInterval.create(getTransactionDate(), endOfBalanceDate).daysInPeriodInclusiveOfEndDate(); this.cumulativeBalance = Money.of(currency, this.runningBalance).multipliedBy(this.balanceNumberOfDays).getAmount(); } public boolean hasNotAmount(final Money amountToCheck) { final Money transactionAmount = getAmount(amountToCheck.getCurrency()); return 
transactionAmount.isNotEqualTo(amountToCheck); } public boolean isFeeChargeAndNotReversed() { return isFeeCharge() && isNotReversed(); } public boolean isFeeCharge() { final SavingsAccountChargesPaidByData chargePaidBy = getSavingsAccountChargePaidBy(); return isPayCharge() && chargePaidBy != null && chargePaidBy.isFeeCharge(); } public void setChargesPaidByData(final SavingsAccountChargesPaidByData savingsAccountChargesPaidByData) { this.chargesPaidByData.add(savingsAccountChargesPaidByData); } public void setOverdraftAmount(final BigDecimal overdraftAmount) { this.overdraftAmount = overdraftAmount; } public boolean isPenaltyChargeAndNotReversed() { return isPenaltyCharge() && isNotReversed(); } public boolean isPenaltyCharge() { final SavingsAccountChargesPaidByData chargePaidBy = getSavingsAccountChargePaidBy(); return isPayCharge() && chargePaidBy != null && chargePaidBy.isPenaltyCharge(); } public boolean isWaiveFeeChargeAndNotReversed() { return isWaiveFeeCharge() && isNotReversed(); } public boolean isWaiveFeeCharge() { final SavingsAccountChargesPaidByData chargePaidBy = getSavingsAccountChargePaidBy(); return isWaiveCharge() && chargePaidBy != null && chargePaidBy.isFeeCharge(); } public boolean isWaiveCharge() { return SavingsAccountTransactionType.fromInt(this.transactionType.getId().intValue()).isWaiveCharge(); } public boolean isWaivePenaltyChargeAndNotReversed() { return isWaivePenaltyCharge() && isNotReversed(); } public boolean isWaivePenaltyCharge() { final SavingsAccountChargesPaidByData chargePaidBy = getSavingsAccountChargePaidBy(); return isWaiveCharge() && chargePaidBy != null && chargePaidBy.isPenaltyCharge(); } private SavingsAccountChargesPaidByData getSavingsAccountChargePaidBy() { if (!CollectionUtils.isEmpty(this.chargesPaidByData)) { return this.chargesPaidByData.iterator().next(); } return null; } public Money getAmount(final MonetaryCurrency currency) { return Money.of(currency, this.amount); } public void addTaxDetails(final 
Map<TaxComponentData, BigDecimal> taxDetails) { if (taxDetails != null) { List<TaxDetailsData> thisTaxDetails = getTaxDetails(); for (Map.Entry<TaxComponentData, BigDecimal> mapEntry : taxDetails.entrySet()) { thisTaxDetails.add(new TaxDetailsData(mapEntry.getKey(), mapEntry.getValue())); } } } public Map<String, Object> toMapData(final CurrencyData currencyData, final Long officeId) { final Map<String, Object> thisTransactionData = new LinkedHashMap<>(); final SavingsAccountTransactionEnumData transactionType = SavingsEnumerations .transactionType(this.transactionType.getId().intValue()); thisTransactionData.put("id", getId()); thisTransactionData.put("officeId", officeId); thisTransactionData.put("type", transactionType); thisTransactionData.put("reversed", isReversed()); thisTransactionData.put("date", getTransactionDate()); thisTransactionData.put("currency", currencyData); thisTransactionData.put("amount", this.amount); thisTransactionData.put("overdraftAmount", this.overdraftAmount); if (this.paymentDetailData != null) { thisTransactionData.put("paymentTypeId", this.paymentDetailData.getPaymentType().getId()); } // Sending data in a map, though in savings we currently expect a transaction to always repay a single charge // (or may repay a part of a single charge too) if (!this.chargesPaidByData.isEmpty()) { final List<Map<String, Object>> savingsChargesPaidData = new ArrayList<>(); for (final SavingsAccountChargesPaidByData chargePaidBy : this.chargesPaidByData) { final Map<String, Object> savingChargePaidData = new LinkedHashMap<>(); savingChargePaidData.put("chargeId", chargePaidBy.getSavingsAccountChargeData()); savingChargePaidData.put("isPenalty", chargePaidBy.getSavingsAccountChargeData().isPenalty()); savingChargePaidData.put("savingsChargeId", chargePaidBy.getSavingsAccountChargeData().getId()); savingChargePaidData.put("amount", chargePaidBy.getAmount()); savingsChargesPaidData.add(savingChargePaidData); } 
thisTransactionData.put("savingsChargesPaid", savingsChargesPaidData); } if (this.taxDetails != null && !this.taxDetails.isEmpty()) { final List<Map<String, Object>> taxData = new ArrayList<>(); for (final TaxDetailsData taxDetails : this.taxDetails) { final Map<String, Object> taxDetailsData = new HashMap<>(); taxDetailsData.put("amount", taxDetails.getAmount()); if (taxDetails.getTaxComponent().getCreditAccount() != null) { taxDetailsData.put("creditAccountId", taxDetails.getTaxComponent().getCreditAccount().getId()); } taxData.add(taxDetailsData); } thisTransactionData.put("taxDetails", taxData); } return thisTransactionData; } public boolean isWithdrawal() { return this.transactionType.isWithdrawal(); } public boolean isInterestPosting() { return this.transactionType.isInterestPosting() || this.transactionType.isOverDraftInterestPosting(); } public boolean isWithHoldTaxAndNotReversed() { return SavingsAccountTransactionType.fromInt(this.transactionType.getId().intValue()).isWithHoldTax() && isNotReversed(); } public boolean isAccrual() { return SavingsAccountTransactionType.fromInt(this.transactionType.getId().intValue()).isAccrual(); } public boolean isNotReversed() { return !isReversed(); } public boolean spansAnyPortionOf(final LocalDateInterval periodInterval) { final LocalDateInterval balanceInterval = LocalDateInterval.create(getTransactionDate(), getEndOfBalanceLocalDate()); return balanceInterval.containsPortionOf(periodInterval); } public void setId(final Long id) { this.id = id; this.modifiedId = id; } public boolean isReversalTransaction() { return Boolean.TRUE.equals(this.isReversal); } public boolean isManualTransaction() { return isManualTransaction; } public boolean isIsManualTransaction() { return isManualTransaction; } public TransactionEntryType getEntryType() { return entryType; } public void setAccountCredit(Long accountCredit) { this.accountCredit = accountCredit; } public void setAccountDebit(Long accountDebit) { this.accountDebit = 
accountDebit; } }
apache/geode
35,232
geode-core/src/main/java/org/apache/geode/internal/cache/tier/sockets/Message.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.tier.sockets; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.Socket; import java.nio.ByteBuffer; import java.nio.channels.SocketChannel; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; import org.apache.geode.SerializationException; import org.apache.geode.annotations.Immutable; import org.apache.geode.annotations.internal.MakeNotStatic; import org.apache.geode.internal.Assert; import org.apache.geode.internal.HeapDataOutputStream; import org.apache.geode.internal.cache.TXManagerImpl; import org.apache.geode.internal.cache.tier.MessageType; import org.apache.geode.internal.offheap.StoredObject; import org.apache.geode.internal.offheap.annotations.Unretained; import org.apache.geode.internal.serialization.KnownVersion; import org.apache.geode.internal.util.BlobHelper; import org.apache.geode.logging.internal.log4j.api.LogService; import org.apache.geode.util.internal.GeodeGlossary; /** * This class 
encapsulates the wire protocol. It provides accessors to encode and decode a message * and serialize it out to the wire. * * <PRE> * messageType - int - 4 bytes type of message, types enumerated below * * msgLength - int - 4 bytes total length of variable length payload * * numberOfParts - int - 4 bytes number of elements (LEN-BYTE* pairs) * contained in the payload. Message can * be a multi-part message * * transId - int - 4 bytes filled in by the requester, copied back into * the response * * flags - byte- 1 byte filled in by the requester * len1 * part1 * . * . * . * lenn * partn * </PRE> * * We read the fixed length 16 bytes into a byte[] and populate a bytebuffer We read the fixed * length header tokens from the header parse the header and use information contained in there to * read the payload. * * <P> * * See also <a href="package-summary.html#messages">package description</a>. * * @see MessageType */ public class Message { // Tentative workaround to avoid OOM stated in #46754. public static final ThreadLocal<MessageType> MESSAGE_TYPE = new ThreadLocal<>(); public static final String MAX_MESSAGE_SIZE_PROPERTY = GeodeGlossary.GEMFIRE_PREFIX + "client.max-message-size"; static final int DEFAULT_MAX_MESSAGE_SIZE = 1073741824; private static final Logger logger = LogService.getLogger(); private static final int PART_HEADER_SIZE = 5; // 4 bytes for length, 1 byte for isObject private static final int FIXED_LENGTH = 17; private static final ThreadLocal<ByteBuffer> tlCommBuffer = new ThreadLocal<>(); // These two statics are fields shoved into the flags byte for transmission. 
// The MESSAGE_IS_RETRY bit is stripped out during deserialization but the other // is left in place private static final byte MESSAGE_HAS_SECURE_PART = (byte) 0x02; private static final byte MESSAGE_IS_RETRY = (byte) 0x04; private static final byte MESSAGE_IS_RETRY_MASK = (byte) 0xFB; private static final int DEFAULT_CHUNK_SIZE = 1024; @Immutable private static final byte[] TRUE = defineTrue(); @Immutable private static final byte[] FALSE = defineFalse(); private static byte[] defineTrue() { try (HeapDataOutputStream hdos = new HeapDataOutputStream(10, null)) { BlobHelper.serializeTo(Boolean.TRUE, hdos); return hdos.toByteArray(); } catch (IOException e) { throw new IllegalStateException(e); } } private static byte[] defineFalse() { try (HeapDataOutputStream hdos = new HeapDataOutputStream(10, null)) { BlobHelper.serializeTo(Boolean.FALSE, hdos); return hdos.toByteArray(); } catch (IOException e) { throw new IllegalStateException(e); } } /** * The maximum size of an outgoing message. If the message is larger than this maximum, it may * cause the receiver to throw an exception on message part length mismatch due to overflow in * message size. * * This value is STATIC because getting a system property requires holding a lock. It is costly to * do this for every message sent. If this value needs to be modified for testing, please add a * new constructor. */ private static final int maxMessageSize = Integer.getInteger(MAX_MESSAGE_SIZE_PROPERTY, DEFAULT_MAX_MESSAGE_SIZE); protected MessageType messageType; private int payloadLength = 0; int numberOfParts; protected int transactionId = TXManagerImpl.NOTX; int currentPart = 0; private Part[] partsList; private ByteBuffer cachedCommBuffer; protected Socket socket = null; private SocketChannel socketChannel = null; private OutputStream outputStream = null; protected InputStream inputStream = null; private boolean messageModified = true; /** is this message a retry of a previously sent message? 
*/ private boolean isRetry; private byte flags = 0x00; MessageStats messageStats = null; protected ServerConnection serverConnection = null; private int maxIncomingMessageLength = -1; private Semaphore dataLimiter = null; private Semaphore messageLimiter = null; private boolean readHeader = false; private int chunkSize = DEFAULT_CHUNK_SIZE; Part securePart = null; private boolean isMetaRegion = false; private KnownVersion version; /** * Creates a new message with the given number of parts */ public Message(int numberOfParts, KnownVersion destVersion) { version = destVersion; Assert.assertTrue(destVersion != null, "Attempt to create an unversioned message"); partsList = new Part[numberOfParts]; this.numberOfParts = numberOfParts; int partsListLength = partsList.length; for (int i = 0; i < partsListLength; i++) { partsList[i] = new Part(); } } public boolean isSecureMode() { return securePart != null; } public byte[] getSecureBytes() throws IOException, ClassNotFoundException { return (byte[]) securePart.getObject(); } public void setMessageType(final @NotNull MessageType messageType) { messageModified = true; this.messageType = messageType; } public void setVersion(KnownVersion clientVersion) { version = clientVersion; } public void setMessageHasSecurePartFlag() { flags |= MESSAGE_HAS_SECURE_PART; } public void clearMessageHasSecurePartFlag() { flags &= MESSAGE_HAS_SECURE_PART; } /** * Sets and builds the {@link Part}s that are sent in the payload of the Message */ public void setNumberOfParts(int numberOfParts) { // hitesh: need to add security header here from server // need to insure it is not chunked message // should we look message type to avoid internal message like ping messageModified = true; currentPart = 0; this.numberOfParts = numberOfParts; if (numberOfParts > partsList.length) { Part[] newPartsList = new Part[numberOfParts]; for (int i = 0; i < numberOfParts; i++) { if (i < partsList.length) { newPartsList[i] = partsList[i]; } else { newPartsList[i] = 
new Part(); } } partsList = newPartsList; } } /** * For boundary testing we may need to inject mock parts. For testing only. */ void setParts(Part[] parts) { partsList = parts; } public void setTransactionId(int transactionId) { messageModified = true; this.transactionId = transactionId; } public void setIsRetry() { isRetry = true; } /** * This returns true if the message has been marked as having been previously transmitted to a * different server. */ public boolean isRetry() { return isRetry; } /* Sets size for HDOS chunk. */ public void setChunkSize(int chunkSize) { this.chunkSize = chunkSize; } /** * When building a Message this will return the number of the next Part to be added to the message */ int getNextPartNumber() { return currentPart; } public void addStringPart(String str) { addStringPart(str, false); } @MakeNotStatic("not tied to the cache lifecycle") private static final Map<String, byte[]> CACHED_STRINGS = new ConcurrentHashMap<>(); public void addStringPart(String str, boolean enableCaching) { if (str == null) { addRawPart(null, false); return; } Part part = partsList[currentPart]; if (enableCaching) { byte[] bytes = CACHED_STRINGS.get(str); if (bytes == null) { try (HeapDataOutputStream hdos = new HeapDataOutputStream(str)) { bytes = hdos.toByteArray(); CACHED_STRINGS.put(str, bytes); } } part.setPartState(bytes, false); } else { // do NOT close the HeapDataOutputStream messageModified = true; part.setPartState(new HeapDataOutputStream(str), false); } currentPart++; } /* * Adds a new part to this message that contains a {@code byte} array (as opposed to a serialized * object). * * @see #addPart(byte[], boolean) */ public void addBytesPart(byte[] newPart) { addRawPart(newPart, false); } public void addStringOrObjPart(Object o) { if (o instanceof String || o == null) { addStringPart((String) o); } else { // Note even if o is a byte[] we need to serialize it. // This could be cleaned up but it would require C client code to change. 
serializeAndAddPart(o, false); } } public void addObjPart(Object o) { addObjPart(o, false); } /** * Like addObjPart(Object) but also prefers to reference objects in the part instead of copying * them into a byte buffer. */ public void addObjPartNoCopying(Object o) { if (o == null || o instanceof byte[]) { addRawPart((byte[]) o, false); } else { serializeAndAddPartNoCopying(o); } } public void addObjPart(Object o, boolean zipValues) { if (o == null || o instanceof byte[]) { addRawPart((byte[]) o, false); } else if (o instanceof Boolean) { addRawPart((Boolean) o ? TRUE : FALSE, true); } else { serializeAndAddPart(o, zipValues); } } /** * Object o is always null */ public void addPartInAnyForm(@Unretained Object o, boolean isObject) { if (o == null) { addRawPart((byte[]) o, false); } else if (o instanceof byte[]) { addRawPart((byte[]) o, isObject); } else if (o instanceof StoredObject) { // It is possible it is an off-heap StoredObject that contains a simple non-object byte[]. messageModified = true; Part part = partsList[currentPart]; part.setPartState((StoredObject) o, isObject); currentPart++; } else { serializeAndAddPart(o, false); } } private void serializeAndAddPartNoCopying(Object o) { KnownVersion v = version; if (version.equals(KnownVersion.CURRENT)) { v = null; } // Create the HDOS with a flag telling it that it can keep any byte[] or ByteBuffers/ByteSources // passed to it. Do NOT close the HeapDataOutputStream! 
HeapDataOutputStream hdos = new HeapDataOutputStream(chunkSize, v, true); try { BlobHelper.serializeTo(o, hdos); } catch (IOException ex) { throw new SerializationException("failed serializing object", ex); } messageModified = true; Part part = partsList[currentPart]; part.setPartState(hdos, true); currentPart++; } private void serializeAndAddPart(Object o, boolean zipValues) { if (zipValues) { throw new UnsupportedOperationException("zipValues no longer supported"); } KnownVersion v = version; if (version.equals(KnownVersion.CURRENT)) { v = null; } // do NOT close the HeapDataOutputStream HeapDataOutputStream hdos = new HeapDataOutputStream(chunkSize, v); try { BlobHelper.serializeTo(o, hdos); } catch (IOException ex) { throw new SerializationException("failed serializing object", ex); } messageModified = true; Part part = partsList[currentPart]; part.setPartState(hdos, true); currentPart++; } public void addIntPart(int v) { messageModified = true; Part part = partsList[currentPart]; part.setInt(v); currentPart++; } public void addLongPart(long v) { messageModified = true; Part part = partsList[currentPart]; part.setLong(v); currentPart++; } public void addBytePart(byte v) { messageModified = true; Part part = partsList[currentPart]; part.setByte(v); currentPart++; } /** * Adds a new part to this message that may contain a serialized object. 
*/ public void addRawPart(byte[] newPart, boolean isObject) { messageModified = true; Part part = partsList[currentPart]; part.setPartState(newPart, isObject); currentPart++; } public MessageType getMessageType() { return messageType; } public int getPayloadLength() { return payloadLength; } public int getHeaderLength() { return FIXED_LENGTH; } public int getNumberOfParts() { return numberOfParts; } public int getTransactionId() { return transactionId; } public Part getPart(int index) { if (index < numberOfParts) { Part p = partsList[index]; if (version != null) { p.setVersion(version); } return p; } return null; } public static ByteBuffer setTLCommBuffer(ByteBuffer bb) { ByteBuffer result = tlCommBuffer.get(); tlCommBuffer.set(bb); return result; } public ByteBuffer getCommBuffer() { if (cachedCommBuffer != null) { return cachedCommBuffer; } else { return tlCommBuffer.get(); } } public void clear() { isRetry = false; int len = payloadLength; if (len != 0) { payloadLength = 0; } if (readHeader) { if (messageStats != null) { messageStats.decMessagesBeingReceived(len); } } ByteBuffer buffer = getCommBuffer(); if (buffer != null) { buffer.clear(); } clearParts(); if (len != 0 && dataLimiter != null) { dataLimiter.release(len); dataLimiter = null; maxIncomingMessageLength = 0; } if (readHeader) { if (messageLimiter != null) { messageLimiter.release(1); messageLimiter = null; } readHeader = false; } flags = 0; } protected void packHeaderInfoForSending(int msgLen, boolean isSecurityHeader) { // setting second bit of flags byte for client this is not require but this makes all changes // easily at client side right now just see this bit and process security header byte flagsByte = flags; if (isSecurityHeader) { flagsByte |= MESSAGE_HAS_SECURE_PART; } if (isRetry) { flagsByte |= MESSAGE_IS_RETRY; } getCommBuffer().putInt(messageType.id).putInt(msgLen).putInt(numberOfParts) .putInt(transactionId).put(flagsByte); } protected Part getSecurityPart() { if (serverConnection != 
null) { // look types right put get etc return serverConnection.updateAndGetSecurityPart(); } return null; } public void setSecurePart(byte[] bytes) { securePart = new Part(); securePart.setPartState(bytes, false); } public void setMetaRegion(boolean isMetaRegion) { this.isMetaRegion = isMetaRegion; } boolean getAndResetIsMetaRegion() { boolean isMetaRegion = this.isMetaRegion; this.isMetaRegion = false; return isMetaRegion; } /** * Sends this message out on its socket. */ void sendBytes(boolean clearMessage) throws IOException { if (serverConnection != null) { // Keep track of the fact that we are making progress. serverConnection.updateProcessingMessage(); } if (socket == null) { throw new IOException("Dead Connection"); } try { final ByteBuffer commBuffer = getCommBuffer(); if (commBuffer == null) { throw new IOException("No buffer"); } synchronized (commBuffer) { long totalPartLen = 0; long headerLen = 0; int partsToTransmit = numberOfParts; for (int i = 0; i < numberOfParts; i++) { Part part = partsList[i]; headerLen += PART_HEADER_SIZE; totalPartLen += part.getLength(); } Part securityPart = getSecurityPart(); if (securityPart == null) { securityPart = securePart; } if (securityPart != null) { headerLen += PART_HEADER_SIZE; totalPartLen += securityPart.getLength(); partsToTransmit++; } if (headerLen + totalPartLen > Integer.MAX_VALUE) { throw new MessageTooLargeException( "Message size (" + (headerLen + totalPartLen) + ") exceeds maximum integer value"); } int msgLen = (int) (headerLen + totalPartLen); if (msgLen > maxMessageSize) { throw new MessageTooLargeException("Message size (" + msgLen + ") exceeds gemfire.client.max-message-size setting (" + maxMessageSize + ")"); } commBuffer.clear(); packHeaderInfoForSending(msgLen, securityPart != null); for (int i = 0; i < partsToTransmit; i++) { Part part = i == numberOfParts ? 
securityPart : partsList[i]; if (commBuffer.remaining() < PART_HEADER_SIZE) { flushBuffer(); } int partLen = part.getLength(); commBuffer.putInt(partLen); commBuffer.put(part.getTypeCode()); if (partLen <= commBuffer.remaining()) { part.writeTo(commBuffer); } else { flushBuffer(); if (socketChannel != null) { part.writeTo(socketChannel, commBuffer); } else { part.writeTo(outputStream, commBuffer); } if (messageStats != null) { messageStats.incSentBytes(partLen); } } } if (commBuffer.position() != 0) { flushBuffer(); } messageModified = false; if (socketChannel == null) { outputStream.flush(); } } } finally { if (clearMessage) { clearParts(); } } } void flushBuffer() throws IOException { final ByteBuffer cb = getCommBuffer(); if (socketChannel != null) { cb.flip(); do { socketChannel.write(cb); } while (cb.remaining() > 0); } else { outputStream.write(cb.array(), 0, cb.position()); } if (messageStats != null) { messageStats.incSentBytes(cb.position()); } cb.clear(); } private void readHeaderAndBody(boolean setHeaderReadTimeout, int headerReadTimeoutMillis) throws IOException { clearParts(); // TODO: for server changes make sure sc is not null as this class also used by client int oldTimeout = -1; if (setHeaderReadTimeout) { oldTimeout = socket.getSoTimeout(); socket.setSoTimeout(headerReadTimeoutMillis); } try { fetchHeader(); } finally { if (setHeaderReadTimeout) { socket.setSoTimeout(oldTimeout); } } final ByteBuffer cb = getCommBuffer(); final int type = cb.getInt(); final int len = cb.getInt(); final int numParts = cb.getInt(); final int txid = cb.getInt(); byte bits = cb.get(); cb.clear(); if (!MessageType.validate(type)) { throw new IOException(String.format("Invalid message type %s while reading header", type)); } int timeToWait = 0; if (serverConnection != null) { // Keep track of the fact that a message is being processed. 
serverConnection.setProcessingMessage(); timeToWait = serverConnection.getClientReadTimeout(); } readHeader = true; if (messageLimiter != null) { for (;;) { serverConnection.getCachedRegionHelper().checkCancelInProgress(null); boolean interrupted = Thread.interrupted(); try { if (timeToWait == 0) { messageLimiter.acquire(1); } else { if (!messageLimiter.tryAcquire(1, timeToWait, TimeUnit.MILLISECONDS)) { if (messageStats instanceof CacheServerStats) { ((CacheServerStats) messageStats).incConnectionsTimedOut(); } throw new IOException( String.format( "Operation timed out on server waiting on concurrent message limiter after waiting %s milliseconds", timeToWait)); } } break; } catch (InterruptedException ignore) { interrupted = true; } finally { if (interrupted) { Thread.currentThread().interrupt(); } } } // for } if (len > 0) { if (maxIncomingMessageLength > 0 && len > maxIncomingMessageLength) { throw new IOException(String.format("Message size %s exceeded max limit of %s", len, maxIncomingMessageLength)); } if (dataLimiter != null) { for (;;) { if (serverConnection != null) { serverConnection.getCachedRegionHelper().checkCancelInProgress(null); } boolean interrupted = Thread.interrupted(); try { if (timeToWait == 0) { dataLimiter.acquire(len); } else { int newTimeToWait = timeToWait; if (messageLimiter != null) { // may have waited for msg limit so recalc time to wait newTimeToWait -= (int) serverConnection.getCurrentMessageProcessingTime(); } if (newTimeToWait <= 0 || !messageLimiter.tryAcquire(1, newTimeToWait, TimeUnit.MILLISECONDS)) { throw new IOException( String.format( "Operation timed out on server waiting on concurrent data limiter after waiting %s milliseconds", timeToWait)); } } // makes sure payloadLength gets set now so we will release the semaphore payloadLength = len; break; // success } catch (InterruptedException ignore) { interrupted = true; } finally { if (interrupted) { Thread.currentThread().interrupt(); } } } } } if (messageStats != null) { 
messageStats.incMessagesBeingReceived(len); payloadLength = len; // makes sure payloadLength gets set now so we will dec on clear } isRetry = (bits & MESSAGE_IS_RETRY) != 0; bits &= MESSAGE_IS_RETRY_MASK; flags = bits; messageType = MessageType.valueOf(type); readPayloadFields(numParts, len); // Set the header and payload fields only after receiving all the // socket data, providing better message consistency in the face // of exceptional conditions (e.g. IO problems, timeouts etc.) payloadLength = len; // this.numberOfParts = numParts; Already set in setPayloadFields via setNumberOfParts transactionId = txid; flags = bits; if (serverConnection != null) { // Keep track of the fact that a message is being processed. serverConnection.updateProcessingMessage(); } } /** * Read the actual bytes of the header off the socket */ void fetchHeader() throws IOException { final ByteBuffer cb = getCommBuffer(); cb.clear(); // messageType is invalidated here and can be used as an indicator // of problems reading the message messageType = MessageType.INVALID; final int headerLength = getHeaderLength(); if (socketChannel != null) { cb.limit(headerLength); do { int bytesRead = socketChannel.read(cb); if (bytesRead == -1) { throw new EOFException( "The connection has been reset while reading the header"); } if (messageStats != null) { messageStats.incReceivedBytes(bytesRead); } } while (cb.remaining() > 0); cb.flip(); } else { int hdr = 0; do { int bytesRead = inputStream.read(cb.array(), hdr, headerLength - hdr); if (bytesRead == -1) { throw new EOFException( "The connection has been reset while reading the header"); } hdr += bytesRead; if (messageStats != null) { messageStats.incReceivedBytes(bytesRead); } } while (hdr < headerLength); // now setup the commBuffer for the caller to parse it cb.rewind(); } } /** * TODO: refactor overly long method readPayloadFields */ void readPayloadFields(final int numParts, final int len) throws IOException { if (len > 0 && numParts <= 0 || len 
<= 0 && numParts > 0) { throw new IOException( String.format("Part length ( %s ) and number of parts ( %s ) inconsistent", len, numParts)); } MessageType msgType = MESSAGE_TYPE.get(); if (msgType == MessageType.PING) { // set it to null right away. MESSAGE_TYPE.set(null); // Some number which will not throw OOM but still be acceptable for a ping operation. int pingParts = 10; if (numParts > pingParts) { throw new IOException("Part length ( " + numParts + " ) is inconsistent for " + msgType + " operation."); } } setNumberOfParts(numParts); if (numParts <= 0) { return; } if (len < 0) { logger.info("rpl: neg len: {}", len); throw new IOException("Dead Connection"); } final ByteBuffer cb = getCommBuffer(); cb.clear(); cb.flip(); int readSecurePart = checkAndSetSecurityPart(); int bytesRemaining = len; for (int i = 0; i < numParts + readSecurePart || readSecurePart == 1 && cb.remaining() > 0; i++) { int bytesReadThisTime = readPartChunk(bytesRemaining); bytesRemaining -= bytesReadThisTime; Part part; if (i < numParts) { part = partsList[i]; } else { part = securePart; } int partLen = cb.getInt(); byte partType = cb.get(); byte[] partBytes = null; if (partLen > 0) { partBytes = new byte[partLen]; int alreadyReadBytes = cb.remaining(); if (alreadyReadBytes > 0) { if (partLen < alreadyReadBytes) { alreadyReadBytes = partLen; } cb.get(partBytes, 0, alreadyReadBytes); } // now we need to read partLen - alreadyReadBytes off the wire int off = alreadyReadBytes; int remaining = partLen - off; while (remaining > 0) { if (socketChannel != null) { int bytesThisTime = remaining; cb.clear(); if (bytesThisTime > cb.capacity()) { bytesThisTime = cb.capacity(); } cb.limit(bytesThisTime); int res = socketChannel.read(cb); if (res != -1) { cb.flip(); bytesRemaining -= res; remaining -= res; cb.get(partBytes, off, res); off += res; if (messageStats != null) { messageStats.incReceivedBytes(res); } } else { throw new EOFException( "The connection has been reset while reading a part"); } } 
else { int res = inputStream.read(partBytes, off, remaining); if (res != -1) { bytesRemaining -= res; remaining -= res; off += res; if (messageStats != null) { messageStats.incReceivedBytes(res); } } else { throw new EOFException( "The connection has been reset while reading a part"); } } } } part.init(partBytes, partType); } } protected int checkAndSetSecurityPart() { if ((flags | MESSAGE_HAS_SECURE_PART) == flags) { securePart = new Part(); return 1; } else { securePart = null; return 0; } } /** * @param bytesRemaining the most bytes we can read * @return the number of bytes read into commBuffer */ private int readPartChunk(int bytesRemaining) throws IOException { final ByteBuffer commBuffer = getCommBuffer(); if (commBuffer.remaining() >= PART_HEADER_SIZE) { // we already have the next part header in commBuffer so just return return 0; } if (commBuffer.position() != 0) { commBuffer.compact(); } else { commBuffer.position(commBuffer.limit()); commBuffer.limit(commBuffer.capacity()); } if (serverConnection != null) { // Keep track of the fact that we are making progress serverConnection.updateProcessingMessage(); } int bytesRead = 0; if (socketChannel != null) { int remaining = commBuffer.remaining(); if (remaining > bytesRemaining) { remaining = bytesRemaining; commBuffer.limit(commBuffer.position() + bytesRemaining); } while (remaining > 0) { int res = socketChannel.read(commBuffer); if (res != -1) { remaining -= res; bytesRead += res; if (messageStats != null) { messageStats.incReceivedBytes(res); } } else { throw new EOFException( "The connection has been reset while reading the payload"); } } } else { int bytesToRead = commBuffer.capacity() - commBuffer.position(); if (bytesRemaining < bytesToRead) { bytesToRead = bytesRemaining; } int pos = commBuffer.position(); while (bytesToRead > 0) { int res = inputStream.read(commBuffer.array(), pos, bytesToRead); if (res != -1) { bytesToRead -= res; pos += res; bytesRead += res; if (messageStats != null) { 
messageStats.incReceivedBytes(res); } } else { throw new EOFException( "The connection has been reset while reading the payload"); } } commBuffer.position(pos); } commBuffer.flip(); return bytesRead; } /** * Gets rid of all the parts that have been added to this message. */ public void clearParts() { for (Part part : partsList) { part.clear(); } currentPart = 0; } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("type=").append(messageType); sb.append("; payloadLength=").append(payloadLength); sb.append("; numberOfParts=").append(numberOfParts); sb.append("; hasSecurePart=").append(isSecureMode()); sb.append("; transactionId=").append(transactionId); sb.append("; currentPart=").append(currentPart); sb.append("; messageModified=").append(messageModified); sb.append("; flags=").append(Integer.toHexString(flags)); for (int i = 0; i < numberOfParts; i++) { sb.append("; part[").append(i).append("]={"); sb.append(partsList[i]); sb.append("}"); } return sb.toString(); } // Set up a message on the server side. void setComms(ServerConnection sc, Socket socket, ByteBuffer bb, MessageStats msgStats) throws IOException { serverConnection = sc; setComms(socket, bb, msgStats); } // Set up a message on the client side. void setComms(Socket socket, ByteBuffer bb, MessageStats msgStats) throws IOException { socketChannel = socket.getChannel(); if (socketChannel == null) { setComms(socket, socket.getInputStream(), socket.getOutputStream(), bb, msgStats); } else { setComms(socket, null, null, bb, msgStats); } } // Set up a message on the client side. public void setComms(Socket socket, InputStream is, OutputStream os, ByteBuffer bb, MessageStats msgStats) { Assert.assertTrue(socket != null); this.socket = socket; socketChannel = socket.getChannel(); inputStream = is; outputStream = os; cachedCommBuffer = bb; messageStats = msgStats; } /** * Undo any state changes done by setComms. 
* * @since GemFire 5.7 */ public void unsetComms() { socket = null; socketChannel = null; inputStream = null; outputStream = null; cachedCommBuffer = null; messageStats = null; } /** * Sends this message to its receiver over its setOutputStream?? output stream. */ public void send() throws IOException { send(true); } public void send(ServerConnection servConn) throws IOException { if (serverConnection != servConn) { throw new IllegalStateException("this.sc was not correctly set"); } send(true); } /** * Sends this message to its receiver over its setOutputStream?? output stream. */ public void send(boolean clearMessage) throws IOException { sendBytes(clearMessage); } /** * Read a message, populating the state of this {@code Message} with information received via its * socket * * @param timeoutMillis timeout setting for reading the header (0 = no timeout) */ public void receiveWithHeaderReadTimeout(int timeoutMillis) throws IOException { if (socket != null) { synchronized (getCommBuffer()) { readHeaderAndBody(true, timeoutMillis); } } else { throw new IOException("Dead Connection"); } } /** * Populates the state of this {@code Message} with information received via its socket */ public void receive() throws IOException { if (socket != null) { synchronized (getCommBuffer()) { readHeaderAndBody(false, -1); } } else { throw new IOException("Dead Connection"); } } public void receive(ServerConnection sc, int maxMessageLength, Semaphore dataLimiter, Semaphore msgLimiter) throws IOException { serverConnection = sc; maxIncomingMessageLength = maxMessageLength; this.dataLimiter = dataLimiter; messageLimiter = msgLimiter; receive(); } }
apache/stanbol
35,558
ontologymanager/registry/src/main/java/org/apache/stanbol/ontologymanager/registry/impl/RegistryManagerImpl.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.stanbol.ontologymanager.registry.impl; import java.util.Collections; import java.util.Dictionary; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.apache.clerezza.rdf.core.access.TcManager; import org.apache.clerezza.rdf.core.serializedform.Parser; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.PropertyOption; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.stanbol.commons.owl.OWLOntologyManagerFactory; import org.apache.stanbol.commons.stanboltools.datafileprovider.DataFileProvider; import org.apache.stanbol.ontologymanager.multiplexer.clerezza.ontology.ClerezzaOntologyProvider; import org.apache.stanbol.ontologymanager.registry.api.RegistryContentException; import org.apache.stanbol.ontologymanager.registry.api.RegistryContentListener; import org.apache.stanbol.ontologymanager.registry.api.RegistryItemFactory; 
import org.apache.stanbol.ontologymanager.registry.api.RegistryManager; import org.apache.stanbol.ontologymanager.registry.api.model.CachingPolicy; import org.apache.stanbol.ontologymanager.registry.api.model.Library; import org.apache.stanbol.ontologymanager.registry.api.model.Registry; import org.apache.stanbol.ontologymanager.registry.api.model.RegistryItem; import org.apache.stanbol.ontologymanager.registry.api.model.RegistryItem.Type; import org.apache.stanbol.ontologymanager.registry.api.model.RegistryOntology; import org.apache.stanbol.ontologymanager.registry.impl.util.RegistryUtils; import org.apache.stanbol.ontologymanager.registry.xd.vocabulary.CODOVocabulary; import org.apache.stanbol.ontologymanager.servicesapi.OfflineConfiguration; import org.apache.stanbol.ontologymanager.servicesapi.ontology.OntologyProvider; import org.osgi.service.component.ComponentContext; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.io.IRIDocumentSource; import org.semanticweb.owlapi.io.OWLOntologyDocumentSource; import org.semanticweb.owlapi.io.StreamDocumentSource; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAnnotationValue; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLAxiomVisitor; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLIndividual; import org.semanticweb.owlapi.model.OWLNamedObject; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyAssertionAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import 
org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyAlreadyExistsException; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyLoaderConfiguration; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.util.OWLAxiomVisitorAdapter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Default implementation of the registry manager, that listens to requests on its referenced resources and * issues loading requests accordingly. * * @author alexdma */ @Component(immediate = true, metatype = true) @Service(RegistryManager.class) public class RegistryManagerImpl implements RegistryManager, RegistryContentListener { private static final CachingPolicy _CACHING_POLICY_DEFAULT = CachingPolicy.CENTRALISED; private static final boolean _LAZY_LOADING_DEFAULT = true; private static final boolean _RETAIN_INCOMPLETE_DEFAULT = true; private static final OWLClass cRegistryLibrary, cOntology; private static final OWLAnnotationProperty hasOntologyAnn, isOntologyOfAnn; private static final OWLObjectProperty hasOntology, isOntologyOf; static { OWLDataFactory factory = OWLManager.getOWLDataFactory(); cOntology = factory.getOWLClass(IRI.create(CODOVocabulary.CODK_Ontology)); cRegistryLibrary = factory.getOWLClass(IRI.create(CODOVocabulary.CODD_OntologyLibrary)); isOntologyOf = factory.getOWLObjectProperty(IRI.create(CODOVocabulary.ODPM_IsOntologyOf)); hasOntology = factory.getOWLObjectProperty(IRI.create(CODOVocabulary.ODPM_HasOntology)); hasOntologyAnn = factory.getOWLAnnotationProperty(IRI.create(CODOVocabulary.ODPM_HasOntology)); isOntologyOfAnn = factory.getOWLAnnotationProperty(IRI.create(CODOVocabulary.ODPM_IsOntologyOf)); } @Reference private OntologyProvider<?> cache; @Property(name = RegistryManager.CACHING_POLICY, options = { @PropertyOption(value = '%' + RegistryManager.CACHING_POLICY + ".option.distributed", name = "DISTRIBUTED"), 
@PropertyOption(value = '%' + RegistryManager.CACHING_POLICY + ".option.centralised", name = "CENTRALISED")}, value = "CENTRALISED") private String cachingPolicyString; @Reference private DataFileProvider dataFileProvider; @Property(name = RegistryManager.LAZY_LOADING, boolValue = _LAZY_LOADING_DEFAULT) private boolean lazyLoading = _LAZY_LOADING_DEFAULT; /* Maps registries to libraries */ private Map<IRI,Set<IRI>> libraryIndex = new HashMap<IRI,Set<IRI>>(); @Property(name = RegistryManager.REGISTRY_LOCATIONS, cardinality = 1000, value = {"stanbol_network.owl"}) private String[] locations; private Logger log = LoggerFactory.getLogger(getClass()); @Reference private OfflineConfiguration offline; /* * Maps libraries (values) to ontologies (keys). This does not keep track of the loading status of each * library, as it is handled by the library itelf. */ private Map<IRI,Set<IRI>> ontologyIndex = new HashMap<IRI,Set<IRI>>(); private Map<IRI,RegistryItem> population = new TreeMap<IRI,RegistryItem>(); private Set<IRI> registries = new HashSet<IRI>(); @Property(name = RegistryManager.RETAIN_INCOMPLETE, boolValue = _RETAIN_INCOMPLETE_DEFAULT) private boolean retainIncomplete = _RETAIN_INCOMPLETE_DEFAULT; private RegistryItemFactory riFactory; /** * This default constructor is <b>only</b> intended to be used by the OSGI environment with Service * Component Runtime support. * <p> * DO NOT USE to manually create instances - the RegistryManagerImpl instances do need to be configured! * YOU NEED TO USE {@link #RegistryManagerImpl(OfflineConfiguration, OntologyProvider, Dictionary))} or * its overloads, to parse the configuration and then initialise the rule store if running outside an OSGI * environment. */ public RegistryManagerImpl() {} /** * To be invoked by non-OSGi environments. 
* * @param the * configuration registry manager-specific configuration */ public RegistryManagerImpl(OfflineConfiguration offline, OntologyProvider<?> cache, Dictionary<String,Object> configuration) { this(); this.offline = offline; this.cache = cache; activate(configuration); } @SuppressWarnings("unchecked") @Activate protected void activate(ComponentContext context) { log.info("in {} activate with context {}", getClass(), context); if (context == null) { throw new IllegalStateException("No valid" + ComponentContext.class + " parsed in activate!"); } activate((Dictionary<String,Object>) context.getProperties()); } protected void activate(Dictionary<String,Object> configuration) { // Parse configuration. try { lazyLoading = (Boolean) (configuration.get(RegistryManager.LAZY_LOADING)); } catch (Exception ex) { lazyLoading = _LAZY_LOADING_DEFAULT; } try { retainIncomplete = (Boolean) (configuration.get(RegistryManager.RETAIN_INCOMPLETE)); } catch (Exception ex) { retainIncomplete = _RETAIN_INCOMPLETE_DEFAULT; } Object obj = configuration.get(RegistryManager.REGISTRY_LOCATIONS); if (obj instanceof String[]) locations = (String[]) obj; else if (obj instanceof String) locations = new String[] {(String) obj}; if (locations == null) locations = new String[] {"stanbol_network.owl"}; Object cachingPolicy = configuration.get(RegistryManager.CACHING_POLICY); if (cachingPolicy == null) { this.cachingPolicyString = _CACHING_POLICY_DEFAULT.name(); } else { this.cachingPolicyString = cachingPolicy.toString(); } final IRI[] offlineResources; if (this.offline != null) { List<IRI> paths = offline.getOntologySourceLocations(); if (paths != null) offlineResources = paths.toArray(new IRI[0]); // There are no offline paths. else offlineResources = new IRI[0]; } // There's no offline configuration at all. else offlineResources = new IRI[0]; // Used only for creating the registry model, do not use for caching. 
OWLOntologyManager mgr = OWLOntologyManagerFactory.createOWLOntologyManager(offlineResources); OWLOntologyLoaderConfiguration conf = new OWLOntologyLoaderConfiguration(); // If we are retaining incomplete registries, do not throw exceptions if imports fail. conf.setSilentMissingImportsHandling(retainIncomplete); // Load registries Set<OWLOntology> regOnts = new HashSet<OWLOntology>(); for (String loc : locations) { try { IRI iri = IRI.create(loc); OWLOntologyDocumentSource src = null; OWLOntology o = null; if (iri.isAbsolute()) src = new IRIDocumentSource(iri); else { // Relative IRI : use data file provider log.debug("Found relative IRI {} . Will try to retrieve from data file providers.", iri); Map<String,String> info = new HashMap<String,String>(); if (dataFileProvider != null && dataFileProvider.isAvailable(null, loc, info)) src = new StreamDocumentSource( dataFileProvider.getInputStream(null, loc, info)); } if (src != null) o = mgr.loadOntologyFromOntologyDocument(src, conf); if (o != null) regOnts.add(o); else log.warn("Failed to obtain OWL ontology from resource {}", loc); } catch (OWLOntologyAlreadyExistsException e) { log.info("Skipping cached ontology {}.", e.getOntologyID()); continue; } catch (OWLOntologyCreationException e) { log.warn("Failed to load ontology " + loc + " - Skipping...", e); continue; } catch (Exception e) { log.warn("Invalid registry configuration " + loc + " - Skipping...", e); continue; } } // Create and set the cache. if (cachingPolicyString.equals(CachingPolicy.CENTRALISED.name())) { // this.cache = OWLOntologyManagerFactory.createOWLOntologyManager(offlineResources); if (cache == null) { log.warn("Caching policy is set as Centralised, but no ontology provider is supplied. 
Will use new in-memory tcProvider."); cache = new ClerezzaOntologyProvider(TcManager.getInstance(), offline, Parser.getInstance()); } // else sta bene cosi' } else if (cachingPolicyString.equals(CachingPolicy.DISTRIBUTED.name())) { this.cache = null; } riFactory = new RegistryItemFactoryImpl(cache); // Build the model. createModel(regOnts); // Set the cache on libraries. Set<RegistryItem> visited = new HashSet<RegistryItem>(); for (Registry reg : getRegistries()) for (RegistryItem child : reg.getChildren()) if (!visited.contains(child)) { if (child instanceof Library) { if (this.cache != null) ((Library) child).setCache(this.cache); else ((Library) child).setCache(new ClerezzaOntologyProvider(TcManager.getInstance(), offline, Parser.getInstance())); } visited.add(child); } if (isLazyLoading()) { // Nothing to do about it at the moment. } else { loadEager(); } } @Override public void addRegistry(Registry registry) { // TODO: automatically set the cache if unset or non conform to the caching policy. 
try { population.put(registry.getIRI(), registry); registries.add(registry.getIRI()); updateLocations(); } catch (Exception e) { log.error("Failed to add ontology registry.", e); } } @Override public void clearRegistries() { for (IRI id : registries) if (registries.remove(id)) population.remove(id); updateLocations(); } /** * @deprecated with each library having its own cache, load balancing is no longer necessary * @return */ protected Registry computeBestCandidate(Library lib) { Map<IRI,Float> loadFactors = computeLoadFactors(); IRI current = null; float lowest = 1.0f; for (RegistryItem item : lib.getParents()) { IRI iri = item.getIRI(); if (loadFactors.containsKey(iri)) { float f = loadFactors.get(iri); if (f < lowest) { lowest = f; current = iri; } } } return (Registry) (population.get(current)); } /** * @deprecated with each library having its own cache, load balancing is no longer necessary * @return */ protected Map<IRI,Float> computeLoadFactors() { Map<IRI,Float> loadFactors = new HashMap<IRI,Float>(); for (Registry r : getRegistries()) { int tot = 0, num = 0; RegistryItem[] children = r.getChildren(); for (int i = 0; i < children.length; i++) { if (children[i] instanceof Library) { if (((Library) children[i]).isLoaded()) num++; tot++; } } loadFactors.put(r.getIRI(), (float) num / (float) tot); } return loadFactors; } @Override public Set<Registry> createModel(Set<OWLOntology> registryOntologies) { Set<Registry> results = new HashSet<Registry>(); // Reset population population.clear(); // Build the transitive imports closure of the union. Set<OWLOntology> closure = new HashSet<OWLOntology>(); for (OWLOntology rego : registryOntologies) closure.addAll(rego.getOWLOntologyManager().getImportsClosure(rego)); /* * For each value in this map, index 0 is the score of the library class, while 1 is the score of the * ontology class. 
*/ final Map<IRI,int[]> candidateTypes = new HashMap<IRI,int[]>(); /* * Scans class assertions and object property values and tries to determine the type of each * individual it finds. */ OWLAxiomVisitor scanner = new OWLAxiomVisitorAdapter() { /* * For a given identifier, returns the array of integers whose value determine the likelihood if * the corresponding entity being a library or an ontology. If no such array exists, it is * created. */ private int[] checkScores(IRI key) { int[] scores; if (candidateTypes.containsKey(key)) scores = candidateTypes.get(key); else { scores = new int[] {0, 0}; candidateTypes.put(key, scores); } return scores; } @Override public void visit(OWLAnnotationAssertionAxiom axiom) { /* * Works like object property assertions, in case hasOntology and isOntologyOf are not * detected to be object properties (e.g. due to a failure to load codolight or the registry * metamodel). */ OWLAnnotationProperty prop = axiom.getProperty(); if (hasOntologyAnn.equals(prop)) { IRI iri; // The axiom subject gets a +1 in its library score. OWLObject ind = axiom.getSubject(); if (ind instanceof IRI) { iri = (IRI) ind; checkScores(iri)[0]++; } // The axiom object gets a +1 in its ontology score. ind = axiom.getValue(); if (ind instanceof IRI) { iri = (IRI) ind; checkScores(iri)[1]++; } } else if (isOntologyOfAnn.equals(prop)) { IRI iri; // The axiom subject gets a +1 in its ontology score. OWLObject ind = axiom.getSubject(); if (ind instanceof IRI) { iri = (IRI) ind; checkScores(iri)[1]++; } // The axiom object gets a +1 in its library score. ind = axiom.getValue(); if (ind instanceof IRI) { iri = (IRI) ind; checkScores(iri)[0]++; } } } @Override public void visit(OWLClassAssertionAxiom axiom) { OWLIndividual ind = axiom.getIndividual(); // Do not accept anonymous registry items. 
if (ind.isAnonymous()) return; IRI iri = ind.asOWLNamedIndividual().getIRI(); int[] scores = checkScores(iri); OWLClassExpression type = axiom.getClassExpression(); // If the type is stated to be a library, increase its library score. if (cRegistryLibrary.equals(type)) { scores[0]++; } else // If the type is stated to be an ontology, increase its ontology score. if (cOntology.equals(type)) { scores[1]++; } } @Override public void visit(OWLObjectPropertyAssertionAxiom axiom) { OWLObjectPropertyExpression prop = axiom.getProperty(); if (hasOntology.equals(prop)) { IRI iri; // The axiom subject gets a +1 in its library score. OWLIndividual ind = axiom.getSubject(); if (!ind.isAnonymous()) { iri = ind.asOWLNamedIndividual().getIRI(); checkScores(iri)[0]++; } // The axiom object gets a +1 in its ontology score. ind = axiom.getObject(); if (!ind.isAnonymous()) { iri = ind.asOWLNamedIndividual().getIRI(); checkScores(iri)[1]++; } } else if (isOntologyOf.equals(prop)) { IRI iri; // The axiom subject gets a +1 in its ontology score. OWLIndividual ind = axiom.getSubject(); if (!ind.isAnonymous()) { iri = ind.asOWLNamedIndividual().getIRI(); checkScores(iri)[1]++; } // The axiom object gets a +1 in its library score. ind = axiom.getObject(); if (!ind.isAnonymous()) { iri = ind.asOWLNamedIndividual().getIRI(); checkScores(iri)[0]++; } } } }; // First pass to determine the types. 
for (OWLOntology o : closure) for (OWLAxiom ax : o.getAxioms()) ax.accept(scanner); // Then populate on the registry OWLDataFactory df = OWLManager.getOWLDataFactory(); for (IRI iri : candidateTypes.keySet()) { int[] scores = candidateTypes.get(iri); if (scores != null && (scores[0] > 0 || scores[1] > 0)) { if (scores[0] > 0 && scores[1] == 0) population.put(iri, riFactory.createLibrary(df.getOWLNamedIndividual(iri))); else if (scores[0] == 0 && scores[1] > 0) population.put(iri, riFactory.createRegistryOntology(df.getOWLNamedIndividual(iri))); } // else log.warn("Unable to determine type for registry item {}", iri); } for (OWLOntology oReg : registryOntologies) { try { results.add(populateRegistry(oReg)); } catch (RegistryContentException e) { log.error("An error occurred while populating an ontology registry.", e); } } return results; } @Deactivate protected void deactivate(ComponentContext context) { lazyLoading = _LAZY_LOADING_DEFAULT; locations = null; log.info("in {} deactivate with context {}", getClass(), context); } @Override public CachingPolicy getCachingPolicy() { try { return CachingPolicy.valueOf(cachingPolicyString); } catch (IllegalArgumentException e) { log.warn("The value \"" + cachingPolicyString + "\" configured as default CachingPolicy does not match any value of the Enumeration! 
" + "Return the default policy as defined by the " + CachingPolicy.class + "."); return _CACHING_POLICY_DEFAULT; } } @Override public Set<Library> getLibraries() { Set<Library> results = new HashSet<Library>(); for (IRI key : population.keySet()) { RegistryItem item = population.get(key); if (item instanceof Library) results.add((Library) item); } return results; } @Override public Set<Library> getLibraries(IRI ontologyID) { Set<Library> results = new HashSet<Library>(); RegistryItem ri = population.get(ontologyID); if (ri != null) for (RegistryItem item : ri.getParents()) if (item instanceof Library) results.add((Library) item); return results; } @Override public Library getLibrary(IRI id) { RegistryItem item = population.get(id); if (item != null && item instanceof Library) return (Library) item; return null; } @Override public OfflineConfiguration getOfflineConfiguration() { return offline; } @Override public Set<Registry> getRegistries() { Set<Registry> results = new HashSet<Registry>(); for (IRI key : population.keySet()) { RegistryItem item = population.get(key); if (item instanceof Registry) results.add((Registry) item); } return results; } @Override public Set<Registry> getRegistries(IRI libraryID) { Set<Registry> results = new HashSet<Registry>(); try { for (RegistryItem item : population.get(libraryID).getParents()) if (item instanceof Registry) results.add((Registry) item); } catch (NullPointerException ex) { return results; } return results; } @Override public Registry getRegistry(IRI id) { RegistryItem item = population.get(id); return item != null && item instanceof Registry ? (Registry) item : null; } @Override public boolean isLazyLoading() { return lazyLoading; } private void loadEager() { for (RegistryItem item : population.values()) { if (item instanceof Library && !((Library) item).isLoaded()) { // TODO: implement ontology request targets. 
if (CachingPolicy.CENTRALISED.equals(getCachingPolicy()) && this.cache != null) { ((Library) item).loadOntologies(this.cache); } else if (CachingPolicy.DISTRIBUTED.equals(getCachingPolicy())) { Library lib = (Library) item; lib.loadOntologies(lib.getCache()); } else { log.error("Tried to load ontology resource {} using a null cache.", item); } } } } protected Library populateLibrary(OWLNamedObject ind, Set<OWLOntology> registries) throws RegistryContentException { IRI libId = ind.getIRI(); RegistryItem lib = null; if (population.containsKey(libId)) { // We are not allowing multityping either. lib = population.get(libId); if (!(lib instanceof Library)) throw new RegistryContentException( "Inconsistent multityping: for item " + libId + " : {" + Library.class + ", " + lib.getClass() + "}"); } else { lib = riFactory.createLibrary(ind); try { population.put(lib.getIRI(), lib); } catch (Exception e) { log.error("Invalid identifier for library item " + lib, e); return null; } } // EXIT nodes. Set<OWLNamedObject> ironts = new HashSet<OWLNamedObject>(); OWLDataFactory df = OWLManager.getOWLDataFactory(); for (OWLOntology o : registries) { if (ind instanceof OWLIndividual) { // Get usages of hasOntology as an object property for (OWLIndividual value : ((OWLIndividual) ind).getObjectPropertyValues(hasOntology, o)) if (value.isNamed()) ironts.add(value.asOWLNamedIndividual()); // Get usages of hasOntology as an annotation property for (OWLAnnotationAssertionAxiom ann : o.getAnnotationAssertionAxioms(ind.getIRI())) if (hasOntologyAnn.equals(ann.getProperty())) { OWLAnnotationValue value = ann.getValue(); if (value instanceof OWLNamedObject) ironts.add((OWLNamedObject) value); else if (value instanceof IRI) ironts.add(df.getOWLNamedIndividual((IRI) value)); } } } for (OWLNamedObject iront : ironts) { IRI childId = iront.getIRI(); // If some populate*() method has created it, it will be there. 
RegistryItem ront = population.get(childId); // Otherwise populating it will also put it in population. if (ront == null) ront = populateOntology(iront, registries); lib.addChild(ront); if (ontologyIndex.get(childId) == null) ontologyIndex.put(childId, new HashSet<IRI>()); ontologyIndex.get(childId).add(libId); } return (Library) lib; } protected RegistryOntology populateOntology(OWLNamedObject ind, Set<OWLOntology> registries) throws RegistryContentException { IRI ontId = ind.getIRI(); RegistryItem ront = null; if (population.containsKey(ontId)) { // We are not allowing multityping either. ront = population.get(ontId); if (!(ront instanceof RegistryOntology)) throw new RegistryContentException( "Inconsistent multityping: for item " + ontId + " : {" + RegistryOntology.class + ", " + ront.getClass() + "}"); } else { ront = riFactory.createRegistryOntology(ind); try { population.put(ront.getIRI(), ront); } catch (Exception e) { log.error("Invalid identifier for library item " + ront, e); return null; } } // EXIT nodes. Set<OWLNamedObject> libs = new HashSet<OWLNamedObject>(); OWLDataFactory df = OWLManager.getOWLDataFactory(); for (OWLOntology o : registries) { if (ind instanceof OWLIndividual) { // Get usages of isOntologyOf as an object property for (OWLIndividual value : ((OWLIndividual) ind).getObjectPropertyValues(isOntologyOf, o)) if (value.isNamed()) libs.add(value.asOWLNamedIndividual()); // Get usages of isOntologyOf as an annotation property for (OWLAnnotationAssertionAxiom ann : o.getAnnotationAssertionAxioms(ind.getIRI())) if (isOntologyOfAnn.equals(ann.getProperty())) { OWLAnnotationValue value = ann.getValue(); if (value instanceof OWLNamedObject) libs.add((OWLNamedObject) value); else if (value instanceof IRI) libs.add(df.getOWLNamedIndividual((IRI) value)); } } } for (OWLNamedObject ilib : libs) { IRI parentId = ilib.getIRI(); // If some populate*() method has created it, it will be there. 
RegistryItem rlib = population.get(parentId); // Otherwise populating it will also put it in population. if (rlib == null) rlib = populateLibrary(ilib, registries); ront.addParent(rlib); if (ontologyIndex.get(ontId) == null) ontologyIndex.put(ontId, new HashSet<IRI>()); ontologyIndex.get(ontId).add(parentId); } return (RegistryOntology) ront; } protected Registry populateRegistry(OWLOntology registry) throws RegistryContentException { log.debug("Populating registry content from ontology {}", registry); Registry reg = riFactory.createRegistry(registry); Set<OWLOntology> closure = registry.getOWLOntologyManager().getImportsClosure(registry); // Just scan all individuals. Recurse in case the registry imports more registries. for (OWLIndividual ind : registry.getIndividualsInSignature(true)) { // We do not allow anonymous registry items. if (ind.isAnonymous()) continue; RegistryItem item = null; // IRI id = ind.asOWLNamedIndividual().getIRI(); Type t = RegistryUtils.getType(ind, closure); if (t == null) { log.warn("Undetermined type for registry ontology individual {}", ind); continue; } switch (t) { case LIBRARY: log.debug("Found library for individual {}", ind); // Create the library and attach to parent and children item = populateLibrary(ind.asOWLNamedIndividual(), closure); reg.addChild(item); item.addRegistryContentListener(this); break; case ONTOLOGY: log.debug("Found ontology for individual {}", ind); // Create the ontology and attach to parent item = populateOntology(ind.asOWLNamedIndividual(), closure); item.addRegistryContentListener(this); // We don't know where to attach it within this method. 
break; default: break; } } try { reg.addRegistryContentListener(this); log.info("Registry {} added.", reg.getIRI()); population.put(reg.getIRI(), reg); } catch (Exception e) { log.error("Invalid identifier for library item " + reg, e); return null; } return reg; } @Override public void registryContentRequested(RegistryItem requestTarget) { log.debug("In {} registry content was requested on {}.", getClass(), requestTarget); // TODO: implement ontology request targets. if (CachingPolicy.CENTRALISED.equals(getCachingPolicy()) && this.cache != null) { if (requestTarget instanceof Library && !((Library) requestTarget).isLoaded()) ((Library) requestTarget) .loadOntologies(this.cache); } else if (CachingPolicy.DISTRIBUTED.equals(getCachingPolicy())) { if (requestTarget instanceof Library && !((Library) requestTarget).isLoaded()) { Library lib = (Library) requestTarget; lib.loadOntologies(lib.getCache()); } } else { log.error("Tried to load ontology resource {} using a null cache.", requestTarget); } } @Override public void removeRegistry(IRI registryId) { // TODO: automatically remove ontologies from the cache if centralised. registries.remove(registryId); updateLocations(); } @Override public void setLazyLoading(boolean lazy) { // Warning: do not use in constructor! this.lazyLoading = lazy; if (!lazy) loadEager(); } protected synchronized void updateLocations() { Set<IRI> locations = Collections.unmodifiableSet(registries); this.locations = locations.toArray(new String[0]); } }
apache/hive
35,291
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableStatsRequest.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class TableStatsRequest implements org.apache.thrift.TBase<TableStatsRequest, TableStatsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<TableStatsRequest> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TableStatsRequest"); private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField TBL_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tblName", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField COL_NAMES_FIELD_DESC = new org.apache.thrift.protocol.TField("colNames", org.apache.thrift.protocol.TType.LIST, (short)3); private static final org.apache.thrift.protocol.TField CAT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("catName", org.apache.thrift.protocol.TType.STRING, (short)4); private static final org.apache.thrift.protocol.TField VALID_WRITE_ID_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("validWriteIdList", org.apache.thrift.protocol.TType.STRING, (short)5); private static final org.apache.thrift.protocol.TField ENGINE_FIELD_DESC = new org.apache.thrift.protocol.TField("engine", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, 
(short)7); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new TableStatsRequestStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new TableStatsRequestTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // required private @org.apache.thrift.annotation.Nullable java.lang.String tblName; // required private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> colNames; // required private @org.apache.thrift.annotation.Nullable java.lang.String catName; // optional private @org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList; // optional private @org.apache.thrift.annotation.Nullable java.lang.String engine; // optional private long id; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { DB_NAME((short)1, "dbName"), TBL_NAME((short)2, "tblName"), COL_NAMES((short)3, "colNames"), CAT_NAME((short)4, "catName"), VALID_WRITE_ID_LIST((short)5, "validWriteIdList"), ENGINE((short)6, "engine"), ID((short)7, "id"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // DB_NAME return DB_NAME; case 2: // TBL_NAME return TBL_NAME; case 3: // COL_NAMES return COL_NAMES; case 4: // CAT_NAME return CAT_NAME; case 5: // VALID_WRITE_ID_LIST return VALID_WRITE_ID_LIST; case 6: // ENGINE return ENGINE; case 7: // ID return ID; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __ID_ISSET_ID = 0; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.CAT_NAME,_Fields.VALID_WRITE_ID_LIST,_Fields.ENGINE,_Fields.ID}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.TBL_NAME, new 
org.apache.thrift.meta_data.FieldMetaData("tblName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.COL_NAMES, new org.apache.thrift.meta_data.FieldMetaData("colNames", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.CAT_NAME, new org.apache.thrift.meta_data.FieldMetaData("catName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.VALID_WRITE_ID_LIST, new org.apache.thrift.meta_data.FieldMetaData("validWriteIdList", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ENGINE, new org.apache.thrift.meta_data.FieldMetaData("engine", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TableStatsRequest.class, metaDataMap); } public TableStatsRequest() { this.engine = "hive"; this.id = -1L; } public TableStatsRequest( java.lang.String dbName, java.lang.String tblName, java.util.List<java.lang.String> colNames) { this(); this.dbName = dbName; this.tblName = tblName; this.colNames = colNames; } /** * Performs a deep copy on <i>other</i>. 
*/ public TableStatsRequest(TableStatsRequest other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetDbName()) { this.dbName = other.dbName; } if (other.isSetTblName()) { this.tblName = other.tblName; } if (other.isSetColNames()) { java.util.List<java.lang.String> __this__colNames = new java.util.ArrayList<java.lang.String>(other.colNames); this.colNames = __this__colNames; } if (other.isSetCatName()) { this.catName = other.catName; } if (other.isSetValidWriteIdList()) { this.validWriteIdList = other.validWriteIdList; } if (other.isSetEngine()) { this.engine = other.engine; } this.id = other.id; } public TableStatsRequest deepCopy() { return new TableStatsRequest(this); } @Override public void clear() { this.dbName = null; this.tblName = null; this.colNames = null; this.catName = null; this.validWriteIdList = null; this.engine = "hive"; this.id = -1L; } @org.apache.thrift.annotation.Nullable public java.lang.String getDbName() { return this.dbName; } public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) { this.dbName = dbName; } public void unsetDbName() { this.dbName = null; } /** Returns true if field dbName is set (has been assigned a value) and false otherwise */ public boolean isSetDbName() { return this.dbName != null; } public void setDbNameIsSet(boolean value) { if (!value) { this.dbName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getTblName() { return this.tblName; } public void setTblName(@org.apache.thrift.annotation.Nullable java.lang.String tblName) { this.tblName = tblName; } public void unsetTblName() { this.tblName = null; } /** Returns true if field tblName is set (has been assigned a value) and false otherwise */ public boolean isSetTblName() { return this.tblName != null; } public void setTblNameIsSet(boolean value) { if (!value) { this.tblName = null; } } public int getColNamesSize() { return (this.colNames == null) ? 
0 : this.colNames.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.lang.String> getColNamesIterator() { return (this.colNames == null) ? null : this.colNames.iterator(); } public void addToColNames(java.lang.String elem) { if (this.colNames == null) { this.colNames = new java.util.ArrayList<java.lang.String>(); } this.colNames.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.lang.String> getColNames() { return this.colNames; } public void setColNames(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> colNames) { this.colNames = colNames; } public void unsetColNames() { this.colNames = null; } /** Returns true if field colNames is set (has been assigned a value) and false otherwise */ public boolean isSetColNames() { return this.colNames != null; } public void setColNamesIsSet(boolean value) { if (!value) { this.colNames = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getCatName() { return this.catName; } public void setCatName(@org.apache.thrift.annotation.Nullable java.lang.String catName) { this.catName = catName; } public void unsetCatName() { this.catName = null; } /** Returns true if field catName is set (has been assigned a value) and false otherwise */ public boolean isSetCatName() { return this.catName != null; } public void setCatNameIsSet(boolean value) { if (!value) { this.catName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getValidWriteIdList() { return this.validWriteIdList; } public void setValidWriteIdList(@org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList) { this.validWriteIdList = validWriteIdList; } public void unsetValidWriteIdList() { this.validWriteIdList = null; } /** Returns true if field validWriteIdList is set (has been assigned a value) and false otherwise */ public boolean isSetValidWriteIdList() { return this.validWriteIdList != null; } public void 
setValidWriteIdListIsSet(boolean value) { if (!value) { this.validWriteIdList = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getEngine() { return this.engine; } public void setEngine(@org.apache.thrift.annotation.Nullable java.lang.String engine) { this.engine = engine; } public void unsetEngine() { this.engine = null; } /** Returns true if field engine is set (has been assigned a value) and false otherwise */ public boolean isSetEngine() { return this.engine != null; } public void setEngineIsSet(boolean value) { if (!value) { this.engine = null; } } public long getId() { return this.id; } public void setId(long id) { this.id = id; setIdIsSet(true); } public void unsetId() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID); } /** Returns true if field id is set (has been assigned a value) and false otherwise */ public boolean isSetId() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID); } public void setIdIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value); } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case DB_NAME: if (value == null) { unsetDbName(); } else { setDbName((java.lang.String)value); } break; case TBL_NAME: if (value == null) { unsetTblName(); } else { setTblName((java.lang.String)value); } break; case COL_NAMES: if (value == null) { unsetColNames(); } else { setColNames((java.util.List<java.lang.String>)value); } break; case CAT_NAME: if (value == null) { unsetCatName(); } else { setCatName((java.lang.String)value); } break; case VALID_WRITE_ID_LIST: if (value == null) { unsetValidWriteIdList(); } else { setValidWriteIdList((java.lang.String)value); } break; case ENGINE: if (value == null) { unsetEngine(); } else { setEngine((java.lang.String)value); } break; case ID: if (value == null) { unsetId(); } else 
{ setId((java.lang.Long)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case DB_NAME: return getDbName(); case TBL_NAME: return getTblName(); case COL_NAMES: return getColNames(); case CAT_NAME: return getCatName(); case VALID_WRITE_ID_LIST: return getValidWriteIdList(); case ENGINE: return getEngine(); case ID: return getId(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case DB_NAME: return isSetDbName(); case TBL_NAME: return isSetTblName(); case COL_NAMES: return isSetColNames(); case CAT_NAME: return isSetCatName(); case VALID_WRITE_ID_LIST: return isSetValidWriteIdList(); case ENGINE: return isSetEngine(); case ID: return isSetId(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof TableStatsRequest) return this.equals((TableStatsRequest)that); return false; } public boolean equals(TableStatsRequest that) { if (that == null) return false; if (this == that) return true; boolean this_present_dbName = true && this.isSetDbName(); boolean that_present_dbName = true && that.isSetDbName(); if (this_present_dbName || that_present_dbName) { if (!(this_present_dbName && that_present_dbName)) return false; if (!this.dbName.equals(that.dbName)) return false; } boolean this_present_tblName = true && this.isSetTblName(); boolean that_present_tblName = true && that.isSetTblName(); if (this_present_tblName || that_present_tblName) { if (!(this_present_tblName && that_present_tblName)) return false; if (!this.tblName.equals(that.tblName)) return false; } boolean this_present_colNames = true && this.isSetColNames(); boolean that_present_colNames = true && that.isSetColNames(); if 
(this_present_colNames || that_present_colNames) { if (!(this_present_colNames && that_present_colNames)) return false; if (!this.colNames.equals(that.colNames)) return false; } boolean this_present_catName = true && this.isSetCatName(); boolean that_present_catName = true && that.isSetCatName(); if (this_present_catName || that_present_catName) { if (!(this_present_catName && that_present_catName)) return false; if (!this.catName.equals(that.catName)) return false; } boolean this_present_validWriteIdList = true && this.isSetValidWriteIdList(); boolean that_present_validWriteIdList = true && that.isSetValidWriteIdList(); if (this_present_validWriteIdList || that_present_validWriteIdList) { if (!(this_present_validWriteIdList && that_present_validWriteIdList)) return false; if (!this.validWriteIdList.equals(that.validWriteIdList)) return false; } boolean this_present_engine = true && this.isSetEngine(); boolean that_present_engine = true && that.isSetEngine(); if (this_present_engine || that_present_engine) { if (!(this_present_engine && that_present_engine)) return false; if (!this.engine.equals(that.engine)) return false; } boolean this_present_id = true && this.isSetId(); boolean that_present_id = true && that.isSetId(); if (this_present_id || that_present_id) { if (!(this_present_id && that_present_id)) return false; if (this.id != that.id) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetDbName()) ? 131071 : 524287); if (isSetDbName()) hashCode = hashCode * 8191 + dbName.hashCode(); hashCode = hashCode * 8191 + ((isSetTblName()) ? 131071 : 524287); if (isSetTblName()) hashCode = hashCode * 8191 + tblName.hashCode(); hashCode = hashCode * 8191 + ((isSetColNames()) ? 131071 : 524287); if (isSetColNames()) hashCode = hashCode * 8191 + colNames.hashCode(); hashCode = hashCode * 8191 + ((isSetCatName()) ? 
131071 : 524287); if (isSetCatName()) hashCode = hashCode * 8191 + catName.hashCode(); hashCode = hashCode * 8191 + ((isSetValidWriteIdList()) ? 131071 : 524287); if (isSetValidWriteIdList()) hashCode = hashCode * 8191 + validWriteIdList.hashCode(); hashCode = hashCode * 8191 + ((isSetEngine()) ? 131071 : 524287); if (isSetEngine()) hashCode = hashCode * 8191 + engine.hashCode(); hashCode = hashCode * 8191 + ((isSetId()) ? 131071 : 524287); if (isSetId()) hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(id); return hashCode; } @Override public int compareTo(TableStatsRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName()); if (lastComparison != 0) { return lastComparison; } if (isSetDbName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTblName(), other.isSetTblName()); if (lastComparison != 0) { return lastComparison; } if (isSetTblName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tblName, other.tblName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetColNames(), other.isSetColNames()); if (lastComparison != 0) { return lastComparison; } if (isSetColNames()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.colNames, other.colNames); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetCatName(), other.isSetCatName()); if (lastComparison != 0) { return lastComparison; } if (isSetCatName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.catName, other.catName); if (lastComparison != 0) { return lastComparison; } } lastComparison = 
java.lang.Boolean.compare(isSetValidWriteIdList(), other.isSetValidWriteIdList()); if (lastComparison != 0) { return lastComparison; } if (isSetValidWriteIdList()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validWriteIdList, other.validWriteIdList); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetEngine(), other.isSetEngine()); if (lastComparison != 0) { return lastComparison; } if (isSetEngine()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.engine, other.engine); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetId(), other.isSetId()); if (lastComparison != 0) { return lastComparison; } if (isSetId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("TableStatsRequest("); boolean first = true; sb.append("dbName:"); if (this.dbName == null) { sb.append("null"); } else { sb.append(this.dbName); } first = false; if (!first) sb.append(", "); sb.append("tblName:"); if (this.tblName == null) { sb.append("null"); } else { sb.append(this.tblName); } first = false; if (!first) sb.append(", "); sb.append("colNames:"); if (this.colNames == null) { sb.append("null"); } else { sb.append(this.colNames); } first = false; if (isSetCatName()) { if (!first) sb.append(", "); sb.append("catName:"); if (this.catName == null) { sb.append("null"); } else { 
sb.append(this.catName); } first = false; } if (isSetValidWriteIdList()) { if (!first) sb.append(", "); sb.append("validWriteIdList:"); if (this.validWriteIdList == null) { sb.append("null"); } else { sb.append(this.validWriteIdList); } first = false; } if (isSetEngine()) { if (!first) sb.append(", "); sb.append("engine:"); if (this.engine == null) { sb.append("null"); } else { sb.append(this.engine); } first = false; } if (isSetId()) { if (!first) sb.append(", "); sb.append("id:"); sb.append(this.id); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetDbName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'dbName' is unset! Struct:" + toString()); } if (!isSetTblName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'tblName' is unset! Struct:" + toString()); } if (!isSetColNames()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'colNames' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class TableStatsRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TableStatsRequestStandardScheme getScheme() { return new TableStatsRequestStandardScheme(); } } private static class TableStatsRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<TableStatsRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, TableStatsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // DB_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // TBL_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.tblName = iprot.readString(); struct.setTblNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // COL_NAMES if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list618 = iprot.readListBegin(); struct.colNames = new java.util.ArrayList<java.lang.String>(_list618.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem619; for (int _i620 = 0; _i620 < _list618.size; ++_i620) { _elem619 = iprot.readString(); struct.colNames.add(_elem619); } iprot.readListEnd(); } struct.setColNamesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // CAT_NAME if (schemeField.type == 
org.apache.thrift.protocol.TType.STRING) { struct.catName = iprot.readString(); struct.setCatNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // VALID_WRITE_ID_LIST if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.validWriteIdList = iprot.readString(); struct.setValidWriteIdListIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // ENGINE if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.engine = iprot.readString(); struct.setEngineIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, TableStatsRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.dbName != null) { oprot.writeFieldBegin(DB_NAME_FIELD_DESC); oprot.writeString(struct.dbName); oprot.writeFieldEnd(); } if (struct.tblName != null) { oprot.writeFieldBegin(TBL_NAME_FIELD_DESC); oprot.writeString(struct.tblName); oprot.writeFieldEnd(); } if (struct.colNames != null) { oprot.writeFieldBegin(COL_NAMES_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.colNames.size())); for (java.lang.String _iter621 : struct.colNames) { oprot.writeString(_iter621); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } if (struct.catName != null) { if (struct.isSetCatName()) { oprot.writeFieldBegin(CAT_NAME_FIELD_DESC); 
oprot.writeString(struct.catName); oprot.writeFieldEnd(); } } if (struct.validWriteIdList != null) { if (struct.isSetValidWriteIdList()) { oprot.writeFieldBegin(VALID_WRITE_ID_LIST_FIELD_DESC); oprot.writeString(struct.validWriteIdList); oprot.writeFieldEnd(); } } if (struct.engine != null) { if (struct.isSetEngine()) { oprot.writeFieldBegin(ENGINE_FIELD_DESC); oprot.writeString(struct.engine); oprot.writeFieldEnd(); } } if (struct.isSetId()) { oprot.writeFieldBegin(ID_FIELD_DESC); oprot.writeI64(struct.id); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class TableStatsRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public TableStatsRequestTupleScheme getScheme() { return new TableStatsRequestTupleScheme(); } } private static class TableStatsRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<TableStatsRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, TableStatsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; oprot.writeString(struct.dbName); oprot.writeString(struct.tblName); { oprot.writeI32(struct.colNames.size()); for (java.lang.String _iter622 : struct.colNames) { oprot.writeString(_iter622); } } java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetCatName()) { optionals.set(0); } if (struct.isSetValidWriteIdList()) { optionals.set(1); } if (struct.isSetEngine()) { optionals.set(2); } if (struct.isSetId()) { optionals.set(3); } oprot.writeBitSet(optionals, 4); if (struct.isSetCatName()) { oprot.writeString(struct.catName); } if (struct.isSetValidWriteIdList()) { oprot.writeString(struct.validWriteIdList); } if (struct.isSetEngine()) { oprot.writeString(struct.engine); } if (struct.isSetId()) { oprot.writeI64(struct.id); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, TableStatsRequest struct) 
throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); struct.tblName = iprot.readString(); struct.setTblNameIsSet(true); { org.apache.thrift.protocol.TList _list623 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING); struct.colNames = new java.util.ArrayList<java.lang.String>(_list623.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem624; for (int _i625 = 0; _i625 < _list623.size; ++_i625) { _elem624 = iprot.readString(); struct.colNames.add(_elem624); } } struct.setColNamesIsSet(true); java.util.BitSet incoming = iprot.readBitSet(4); if (incoming.get(0)) { struct.catName = iprot.readString(); struct.setCatNameIsSet(true); } if (incoming.get(1)) { struct.validWriteIdList = iprot.readString(); struct.setValidWriteIdListIsSet(true); } if (incoming.get(2)) { struct.engine = iprot.readString(); struct.setEngineIsSet(true); } if (incoming.get(3)) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
googleapis/google-cloud-java
35,311
java-bare-metal-solution/proto-google-cloud-bare-metal-solution-v2/src/main/java/com/google/cloud/baremetalsolution/v2/UpdateNetworkRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/baremetalsolution/v2/network.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.baremetalsolution.v2; /** * * * <pre> * Message requesting to updating a network. * </pre> * * Protobuf type {@code google.cloud.baremetalsolution.v2.UpdateNetworkRequest} */ public final class UpdateNetworkRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.baremetalsolution.v2.UpdateNetworkRequest) UpdateNetworkRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateNetworkRequest.newBuilder() to construct. 
private UpdateNetworkRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateNetworkRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateNetworkRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.baremetalsolution.v2.NetworkProto .internal_static_google_cloud_baremetalsolution_v2_UpdateNetworkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.baremetalsolution.v2.NetworkProto .internal_static_google_cloud_baremetalsolution_v2_UpdateNetworkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.class, com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.Builder.class); } private int bitField0_; public static final int NETWORK_FIELD_NUMBER = 1; private com.google.cloud.baremetalsolution.v2.Network network_; /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the network field is set. */ @java.lang.Override public boolean hasNetwork() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The network. 
*/ @java.lang.Override public com.google.cloud.baremetalsolution.v2.Network getNetwork() { return network_ == null ? com.google.cloud.baremetalsolution.v2.Network.getDefaultInstance() : network_; } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.baremetalsolution.v2.NetworkOrBuilder getNetworkOrBuilder() { return network_ == null ? com.google.cloud.baremetalsolution.v2.Network.getDefaultInstance() : network_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getNetwork()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNetwork()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest)) { return super.equals(obj); } com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest other = (com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest) obj; if (hasNetwork() != other.hasNetwork()) return false; if (hasNetwork()) { if (!getNetwork().equals(other.getNetwork())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if 
(hasNetwork()) { hash = (37 * hash) + NETWORK_FIELD_NUMBER; hash = (53 * hash) + getNetwork().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message requesting to updating a network. * </pre> * * Protobuf type {@code google.cloud.baremetalsolution.v2.UpdateNetworkRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.baremetalsolution.v2.UpdateNetworkRequest) com.google.cloud.baremetalsolution.v2.UpdateNetworkRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.baremetalsolution.v2.NetworkProto .internal_static_google_cloud_baremetalsolution_v2_UpdateNetworkRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.baremetalsolution.v2.NetworkProto .internal_static_google_cloud_baremetalsolution_v2_UpdateNetworkRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.class, com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.Builder.class); } // Construct using com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getNetworkFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; network_ = null; if (networkBuilder_ != null) { networkBuilder_.dispose(); networkBuilder_ = null; } updateMask_ = null; if 
(updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.baremetalsolution.v2.NetworkProto .internal_static_google_cloud_baremetalsolution_v2_UpdateNetworkRequest_descriptor; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest getDefaultInstanceForType() { return com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest build() { com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest buildPartial() { com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest result = new com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.network_ = networkBuilder_ == null ? network_ : networkBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest) { return mergeFrom((com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest other) { if (other == com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest.getDefaultInstance()) return this; if (other.hasNetwork()) { mergeNetwork(other.getNetwork()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { 
throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getNetworkFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.baremetalsolution.v2.Network network_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.baremetalsolution.v2.Network, com.google.cloud.baremetalsolution.v2.Network.Builder, com.google.cloud.baremetalsolution.v2.NetworkOrBuilder> networkBuilder_; /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the network field is set. */ public boolean hasNetwork() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The network. */ public com.google.cloud.baremetalsolution.v2.Network getNetwork() { if (networkBuilder_ == null) { return network_ == null ? 
com.google.cloud.baremetalsolution.v2.Network.getDefaultInstance() : network_; } else { return networkBuilder_.getMessage(); } } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNetwork(com.google.cloud.baremetalsolution.v2.Network value) { if (networkBuilder_ == null) { if (value == null) { throw new NullPointerException(); } network_ = value; } else { networkBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setNetwork( com.google.cloud.baremetalsolution.v2.Network.Builder builderForValue) { if (networkBuilder_ == null) { network_ = builderForValue.build(); } else { networkBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. 
* Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeNetwork(com.google.cloud.baremetalsolution.v2.Network value) { if (networkBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && network_ != null && network_ != com.google.cloud.baremetalsolution.v2.Network.getDefaultInstance()) { getNetworkBuilder().mergeFrom(value); } else { network_ = value; } } else { networkBuilder_.mergeFrom(value); } if (network_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearNetwork() { bitField0_ = (bitField0_ & ~0x00000001); network_ = null; if (networkBuilder_ != null) { networkBuilder_.dispose(); networkBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.baremetalsolution.v2.Network.Builder getNetworkBuilder() { bitField0_ |= 0x00000001; onChanged(); return getNetworkFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. 
* Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.baremetalsolution.v2.NetworkOrBuilder getNetworkOrBuilder() { if (networkBuilder_ != null) { return networkBuilder_.getMessageOrBuilder(); } else { return network_ == null ? com.google.cloud.baremetalsolution.v2.Network.getDefaultInstance() : network_; } } /** * * * <pre> * Required. The network to update. * * The `name` field is used to identify the instance to update. * Format: projects/{project}/locations/{location}/networks/{network} * </pre> * * <code> * .google.cloud.baremetalsolution.v2.Network network = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.baremetalsolution.v2.Network, com.google.cloud.baremetalsolution.v2.Network.Builder, com.google.cloud.baremetalsolution.v2.NetworkOrBuilder> getNetworkFieldBuilder() { if (networkBuilder_ == null) { networkBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.baremetalsolution.v2.Network, com.google.cloud.baremetalsolution.v2.Network.Builder, com.google.cloud.baremetalsolution.v2.NetworkOrBuilder>( getNetwork(), getParentForChildren(), isClean()); network_ = null; } return networkBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The list of fields to update. 
* The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The list of fields to update. * The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The list of fields to update. 
* The only currently supported fields are: * `labels`, `reservations`, `vrf.vlan_attachments` * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.baremetalsolution.v2.UpdateNetworkRequest) } // @@protoc_insertion_point(class_scope:google.cloud.baremetalsolution.v2.UpdateNetworkRequest) private static final com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest(); } public static com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateNetworkRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateNetworkRequest>() { @java.lang.Override public UpdateNetworkRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateNetworkRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateNetworkRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.baremetalsolution.v2.UpdateNetworkRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,142
java-maps-fleetengine-delivery/proto-google-maps-fleetengine-delivery-v1/src/main/java/com/google/maps/fleetengine/delivery/v1/TaskAttribute.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/maps/fleetengine/delivery/v1/common.proto // Protobuf Java Version: 3.25.8 package com.google.maps.fleetengine.delivery.v1; /** * * * <pre> * Describes a task attribute as a key-value pair. The "key:value" string length * cannot exceed 256 characters. * </pre> * * Protobuf type {@code maps.fleetengine.delivery.v1.TaskAttribute} */ public final class TaskAttribute extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:maps.fleetengine.delivery.v1.TaskAttribute) TaskAttributeOrBuilder { private static final long serialVersionUID = 0L; // Use TaskAttribute.newBuilder() to construct. 
private TaskAttribute(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private TaskAttribute() { key_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new TaskAttribute(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.fleetengine.delivery.v1.Common .internal_static_maps_fleetengine_delivery_v1_TaskAttribute_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.fleetengine.delivery.v1.Common .internal_static_maps_fleetengine_delivery_v1_TaskAttribute_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.fleetengine.delivery.v1.TaskAttribute.class, com.google.maps.fleetengine.delivery.v1.TaskAttribute.Builder.class); } private int taskAttributeValueCase_ = 0; @SuppressWarnings("serial") private java.lang.Object taskAttributeValue_; public enum TaskAttributeValueCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { STRING_VALUE(2), BOOL_VALUE(3), NUMBER_VALUE(4), TASKATTRIBUTEVALUE_NOT_SET(0); private final int value; private TaskAttributeValueCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static TaskAttributeValueCase valueOf(int value) { return forNumber(value); } public static TaskAttributeValueCase forNumber(int value) { switch (value) { case 2: return STRING_VALUE; case 3: return BOOL_VALUE; case 4: return NUMBER_VALUE; case 0: return TASKATTRIBUTEVALUE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public TaskAttributeValueCase getTaskAttributeValueCase() { return TaskAttributeValueCase.forNumber(taskAttributeValueCase_); } public static final int KEY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object key_ = ""; /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @return The key. */ @java.lang.Override public java.lang.String getKey() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); key_ = s; return s; } } /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @return The bytes for key. */ @java.lang.Override public com.google.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); key_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STRING_VALUE_FIELD_NUMBER = 2; /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return Whether the stringValue field is set. */ public boolean hasStringValue() { return taskAttributeValueCase_ == 2; } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return The stringValue. 
*/ public java.lang.String getStringValue() { java.lang.Object ref = ""; if (taskAttributeValueCase_ == 2) { ref = taskAttributeValue_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (taskAttributeValueCase_ == 2) { taskAttributeValue_ = s; } return s; } } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return The bytes for stringValue. */ public com.google.protobuf.ByteString getStringValueBytes() { java.lang.Object ref = ""; if (taskAttributeValueCase_ == 2) { ref = taskAttributeValue_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (taskAttributeValueCase_ == 2) { taskAttributeValue_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int BOOL_VALUE_FIELD_NUMBER = 3; /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @return Whether the boolValue field is set. */ @java.lang.Override public boolean hasBoolValue() { return taskAttributeValueCase_ == 3; } /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @return The boolValue. */ @java.lang.Override public boolean getBoolValue() { if (taskAttributeValueCase_ == 3) { return (java.lang.Boolean) taskAttributeValue_; } return false; } public static final int NUMBER_VALUE_FIELD_NUMBER = 4; /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @return Whether the numberValue field is set. */ @java.lang.Override public boolean hasNumberValue() { return taskAttributeValueCase_ == 4; } /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @return The numberValue. 
*/ @java.lang.Override public double getNumberValue() { if (taskAttributeValueCase_ == 4) { return (java.lang.Double) taskAttributeValue_; } return 0D; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, key_); } if (taskAttributeValueCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, taskAttributeValue_); } if (taskAttributeValueCase_ == 3) { output.writeBool(3, (boolean) ((java.lang.Boolean) taskAttributeValue_)); } if (taskAttributeValueCase_ == 4) { output.writeDouble(4, (double) ((java.lang.Double) taskAttributeValue_)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(key_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, key_); } if (taskAttributeValueCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, taskAttributeValue_); } if (taskAttributeValueCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeBoolSize( 3, (boolean) ((java.lang.Boolean) taskAttributeValue_)); } if (taskAttributeValueCase_ == 4) { size += com.google.protobuf.CodedOutputStream.computeDoubleSize( 4, (double) ((java.lang.Double) taskAttributeValue_)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.maps.fleetengine.delivery.v1.TaskAttribute)) { return super.equals(obj); } com.google.maps.fleetengine.delivery.v1.TaskAttribute other = (com.google.maps.fleetengine.delivery.v1.TaskAttribute) obj; if (!getKey().equals(other.getKey())) return false; if (!getTaskAttributeValueCase().equals(other.getTaskAttributeValueCase())) return false; switch (taskAttributeValueCase_) { case 2: if (!getStringValue().equals(other.getStringValue())) return false; break; case 3: if (getBoolValue() != other.getBoolValue()) return false; break; case 4: if (java.lang.Double.doubleToLongBits(getNumberValue()) != java.lang.Double.doubleToLongBits(other.getNumberValue())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + KEY_FIELD_NUMBER; hash = (53 * hash) + getKey().hashCode(); switch (taskAttributeValueCase_) { case 2: hash = (37 * hash) + STRING_VALUE_FIELD_NUMBER; hash = (53 * hash) + getStringValue().hashCode(); break; case 3: hash = (37 * hash) + BOOL_VALUE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getBoolValue()); break; case 4: hash = (37 * hash) + NUMBER_VALUE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( java.lang.Double.doubleToLongBits(getNumberValue())); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.maps.fleetengine.delivery.v1.TaskAttribute prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Describes a task attribute as a key-value pair. The "key:value" string length * cannot exceed 256 characters. 
* </pre> * * Protobuf type {@code maps.fleetengine.delivery.v1.TaskAttribute} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:maps.fleetengine.delivery.v1.TaskAttribute) com.google.maps.fleetengine.delivery.v1.TaskAttributeOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.maps.fleetengine.delivery.v1.Common .internal_static_maps_fleetengine_delivery_v1_TaskAttribute_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.maps.fleetengine.delivery.v1.Common .internal_static_maps_fleetengine_delivery_v1_TaskAttribute_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.maps.fleetengine.delivery.v1.TaskAttribute.class, com.google.maps.fleetengine.delivery.v1.TaskAttribute.Builder.class); } // Construct using com.google.maps.fleetengine.delivery.v1.TaskAttribute.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; key_ = ""; taskAttributeValueCase_ = 0; taskAttributeValue_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.maps.fleetengine.delivery.v1.Common .internal_static_maps_fleetengine_delivery_v1_TaskAttribute_descriptor; } @java.lang.Override public com.google.maps.fleetengine.delivery.v1.TaskAttribute getDefaultInstanceForType() { return com.google.maps.fleetengine.delivery.v1.TaskAttribute.getDefaultInstance(); } @java.lang.Override public com.google.maps.fleetengine.delivery.v1.TaskAttribute build() { com.google.maps.fleetengine.delivery.v1.TaskAttribute result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.maps.fleetengine.delivery.v1.TaskAttribute buildPartial() { com.google.maps.fleetengine.delivery.v1.TaskAttribute result = new com.google.maps.fleetengine.delivery.v1.TaskAttribute(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.maps.fleetengine.delivery.v1.TaskAttribute result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.key_ = key_; } } private void buildPartialOneofs(com.google.maps.fleetengine.delivery.v1.TaskAttribute result) { result.taskAttributeValueCase_ = taskAttributeValueCase_; result.taskAttributeValue_ = this.taskAttributeValue_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.maps.fleetengine.delivery.v1.TaskAttribute) { return mergeFrom((com.google.maps.fleetengine.delivery.v1.TaskAttribute) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.maps.fleetengine.delivery.v1.TaskAttribute other) { if (other == com.google.maps.fleetengine.delivery.v1.TaskAttribute.getDefaultInstance()) return this; if (!other.getKey().isEmpty()) { key_ = other.key_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getTaskAttributeValueCase()) { case STRING_VALUE: { taskAttributeValueCase_ = 2; taskAttributeValue_ = other.taskAttributeValue_; onChanged(); break; } case BOOL_VALUE: { setBoolValue(other.getBoolValue()); break; } case NUMBER_VALUE: { setNumberValue(other.getNumberValue()); break; } case TASKATTRIBUTEVALUE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { key_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); taskAttributeValueCase_ = 2; taskAttributeValue_ = s; break; } // case 18 case 24: { taskAttributeValue_ = input.readBool(); taskAttributeValueCase_ = 3; break; } // case 24 case 33: { taskAttributeValue_ = input.readDouble(); taskAttributeValueCase_ = 4; break; } // case 33 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int taskAttributeValueCase_ = 0; private java.lang.Object taskAttributeValue_; public 
TaskAttributeValueCase getTaskAttributeValueCase() { return TaskAttributeValueCase.forNumber(taskAttributeValueCase_); } public Builder clearTaskAttributeValue() { taskAttributeValueCase_ = 0; taskAttributeValue_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object key_ = ""; /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @return The key. */ public java.lang.String getKey() { java.lang.Object ref = key_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); key_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @return The bytes for key. */ public com.google.protobuf.ByteString getKeyBytes() { java.lang.Object ref = key_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); key_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @param value The key to set. * @return This builder for chaining. */ public Builder setKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). * </pre> * * <code>string key = 1;</code> * * @return This builder for chaining. */ public Builder clearKey() { key_ = getDefaultInstance().getKey(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The attribute's key. Keys may not contain the colon character (:). 
* </pre> * * <code>string key = 1;</code> * * @param value The bytes for key to set. * @return This builder for chaining. */ public Builder setKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); key_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return Whether the stringValue field is set. */ @java.lang.Override public boolean hasStringValue() { return taskAttributeValueCase_ == 2; } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return The stringValue. */ @java.lang.Override public java.lang.String getStringValue() { java.lang.Object ref = ""; if (taskAttributeValueCase_ == 2) { ref = taskAttributeValue_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (taskAttributeValueCase_ == 2) { taskAttributeValue_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return The bytes for stringValue. */ @java.lang.Override public com.google.protobuf.ByteString getStringValueBytes() { java.lang.Object ref = ""; if (taskAttributeValueCase_ == 2) { ref = taskAttributeValue_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (taskAttributeValueCase_ == 2) { taskAttributeValue_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @param value The stringValue to set. * @return This builder for chaining. 
*/ public Builder setStringValue(java.lang.String value) { if (value == null) { throw new NullPointerException(); } taskAttributeValueCase_ = 2; taskAttributeValue_ = value; onChanged(); return this; } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @return This builder for chaining. */ public Builder clearStringValue() { if (taskAttributeValueCase_ == 2) { taskAttributeValueCase_ = 0; taskAttributeValue_ = null; onChanged(); } return this; } /** * * * <pre> * String typed attribute value. * </pre> * * <code>string string_value = 2;</code> * * @param value The bytes for stringValue to set. * @return This builder for chaining. */ public Builder setStringValueBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); taskAttributeValueCase_ = 2; taskAttributeValue_ = value; onChanged(); return this; } /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @return Whether the boolValue field is set. */ public boolean hasBoolValue() { return taskAttributeValueCase_ == 3; } /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @return The boolValue. */ public boolean getBoolValue() { if (taskAttributeValueCase_ == 3) { return (java.lang.Boolean) taskAttributeValue_; } return false; } /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @param value The boolValue to set. * @return This builder for chaining. */ public Builder setBoolValue(boolean value) { taskAttributeValueCase_ = 3; taskAttributeValue_ = value; onChanged(); return this; } /** * * * <pre> * Boolean typed attribute value. * </pre> * * <code>bool bool_value = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearBoolValue() { if (taskAttributeValueCase_ == 3) { taskAttributeValueCase_ = 0; taskAttributeValue_ = null; onChanged(); } return this; } /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @return Whether the numberValue field is set. */ public boolean hasNumberValue() { return taskAttributeValueCase_ == 4; } /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @return The numberValue. */ public double getNumberValue() { if (taskAttributeValueCase_ == 4) { return (java.lang.Double) taskAttributeValue_; } return 0D; } /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @param value The numberValue to set. * @return This builder for chaining. */ public Builder setNumberValue(double value) { taskAttributeValueCase_ = 4; taskAttributeValue_ = value; onChanged(); return this; } /** * * * <pre> * Double typed attribute value. * </pre> * * <code>double number_value = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearNumberValue() { if (taskAttributeValueCase_ == 4) { taskAttributeValueCase_ = 0; taskAttributeValue_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:maps.fleetengine.delivery.v1.TaskAttribute) } // @@protoc_insertion_point(class_scope:maps.fleetengine.delivery.v1.TaskAttribute) private static final com.google.maps.fleetengine.delivery.v1.TaskAttribute DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.maps.fleetengine.delivery.v1.TaskAttribute(); } public static com.google.maps.fleetengine.delivery.v1.TaskAttribute getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<TaskAttribute> PARSER = new com.google.protobuf.AbstractParser<TaskAttribute>() { @java.lang.Override public TaskAttribute parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<TaskAttribute> parser() { return PARSER; } @java.lang.Override public 
com.google.protobuf.Parser<TaskAttribute> getParserForType() { return PARSER; } @java.lang.Override public com.google.maps.fleetengine.delivery.v1.TaskAttribute getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,349
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/SummarizationVerbosityInput.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Input for summarization verbosity metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.SummarizationVerbosityInput} */ public final class SummarizationVerbosityInput extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.SummarizationVerbosityInput) SummarizationVerbosityInputOrBuilder { private static final long serialVersionUID = 0L; // Use SummarizationVerbosityInput.newBuilder() to construct. 
private SummarizationVerbosityInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SummarizationVerbosityInput() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SummarizationVerbosityInput(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_SummarizationVerbosityInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_SummarizationVerbosityInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.class, com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.Builder.class); } private int bitField0_; public static final int METRIC_SPEC_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1.SummarizationVerbositySpec metricSpec_; /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the metricSpec field is set. */ @java.lang.Override public boolean hasMetricSpec() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The metricSpec. */ @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbositySpec getMetricSpec() { return metricSpec_ == null ? 
com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.getDefaultInstance() : metricSpec_; } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbositySpecOrBuilder getMetricSpecOrBuilder() { return metricSpec_ == null ? com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.getDefaultInstance() : metricSpec_; } public static final int INSTANCE_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance_; /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ @java.lang.Override public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance getInstance() { return instance_ == null ? com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.getDefaultInstance() : instance_; } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInstanceOrBuilder getInstanceOrBuilder() { return instance_ == null ? 
com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.getDefaultInstance() : instance_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getInstance()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.SummarizationVerbosityInput)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.SummarizationVerbosityInput other = (com.google.cloud.aiplatform.v1.SummarizationVerbosityInput) obj; if (hasMetricSpec() != other.hasMetricSpec()) return false; if (hasMetricSpec()) { if (!getMetricSpec().equals(other.getMetricSpec())) return false; } if (hasInstance() != other.hasInstance()) return false; if (hasInstance()) { if (!getInstance().equals(other.getInstance())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasMetricSpec()) { hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER; hash = (53 * hash) + getMetricSpec().hashCode(); } if (hasInstance()) { hash = (37 * hash) + INSTANCE_FIELD_NUMBER; hash = (53 * hash) + getInstance().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.SummarizationVerbosityInput prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Input for summarization verbosity metric. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.SummarizationVerbosityInput} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.SummarizationVerbosityInput) com.google.cloud.aiplatform.v1.SummarizationVerbosityInputOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_SummarizationVerbosityInput_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_SummarizationVerbosityInput_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.class, com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getMetricSpecFieldBuilder(); getInstanceFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; metricSpec_ = null; if (metricSpecBuilder_ != null) { metricSpecBuilder_.dispose(); metricSpecBuilder_ = null; } instance_ = 
null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1_SummarizationVerbosityInput_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInput getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInput build() { com.google.cloud.aiplatform.v1.SummarizationVerbosityInput result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInput buildPartial() { com.google.cloud.aiplatform.v1.SummarizationVerbosityInput result = new com.google.cloud.aiplatform.v1.SummarizationVerbosityInput(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.SummarizationVerbosityInput result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instance_ = instanceBuilder_ == null ? 
instance_ : instanceBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.SummarizationVerbosityInput) { return mergeFrom((com.google.cloud.aiplatform.v1.SummarizationVerbosityInput) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.SummarizationVerbosityInput other) { if (other == com.google.cloud.aiplatform.v1.SummarizationVerbosityInput.getDefaultInstance()) return this; if (other.hasMetricSpec()) { mergeMetricSpec(other.getMetricSpec()); } if (other.hasInstance()) { mergeInstance(other.getInstance()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { 
throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.aiplatform.v1.SummarizationVerbositySpec metricSpec_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbositySpec, com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbositySpecOrBuilder> metricSpecBuilder_; /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the metricSpec field is set. */ public boolean hasMetricSpec() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The metricSpec. */ public com.google.cloud.aiplatform.v1.SummarizationVerbositySpec getMetricSpec() { if (metricSpecBuilder_ == null) { return metricSpec_ == null ? 
com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.getDefaultInstance() : metricSpec_; } else { return metricSpecBuilder_.getMessage(); } } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMetricSpec(com.google.cloud.aiplatform.v1.SummarizationVerbositySpec value) { if (metricSpecBuilder_ == null) { if (value == null) { throw new NullPointerException(); } metricSpec_ = value; } else { metricSpecBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setMetricSpec( com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.Builder builderForValue) { if (metricSpecBuilder_ == null) { metricSpec_ = builderForValue.build(); } else { metricSpecBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeMetricSpec( com.google.cloud.aiplatform.v1.SummarizationVerbositySpec value) { if (metricSpecBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && metricSpec_ != null && metricSpec_ != com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.getDefaultInstance()) { getMetricSpecBuilder().mergeFrom(value); } else { metricSpec_ = value; } } else { metricSpecBuilder_.mergeFrom(value); } if (metricSpec_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. 
Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearMetricSpec() { bitField0_ = (bitField0_ & ~0x00000001); metricSpec_ = null; if (metricSpecBuilder_ != null) { metricSpecBuilder_.dispose(); metricSpecBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.Builder getMetricSpecBuilder() { bitField0_ |= 0x00000001; onChanged(); return getMetricSpecFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Spec for summarization verbosity score metric. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.SummarizationVerbositySpecOrBuilder getMetricSpecOrBuilder() { if (metricSpecBuilder_ != null) { return metricSpecBuilder_.getMessageOrBuilder(); } else { return metricSpec_ == null ? com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.getDefaultInstance() : metricSpec_; } } /** * * * <pre> * Required. Spec for summarization verbosity score metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbositySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbositySpec, com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbositySpecOrBuilder> getMetricSpecFieldBuilder() { if (metricSpecBuilder_ == null) { metricSpecBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbositySpec, com.google.cloud.aiplatform.v1.SummarizationVerbositySpec.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbositySpecOrBuilder>( getMetricSpec(), getParentForChildren(), isClean()); metricSpec_ = null; } return metricSpecBuilder_; } private com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstanceOrBuilder> instanceBuilder_; /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the instance field is set. */ public boolean hasInstance() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The instance. */ public com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance getInstance() { if (instanceBuilder_ == null) { return instance_ == null ? 
com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.getDefaultInstance() : instance_; } else { return instanceBuilder_.getMessage(); } } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance( com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance value) { if (instanceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } instance_ = value; } else { instanceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setInstance( com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.Builder builderForValue) { if (instanceBuilder_ == null) { instance_ = builderForValue.build(); } else { instanceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeInstance( com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance value) { if (instanceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && instance_ != null && instance_ != com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance .getDefaultInstance()) { getInstanceBuilder().mergeFrom(value); } else { instance_ = value; } } else { instanceBuilder_.mergeFrom(value); } if (instance_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Summarization verbosity instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearInstance() { bitField0_ = (bitField0_ & ~0x00000002); instance_ = null; if (instanceBuilder_ != null) { instanceBuilder_.dispose(); instanceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.Builder getInstanceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getInstanceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Summarization verbosity instance. * </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.SummarizationVerbosityInstanceOrBuilder getInstanceOrBuilder() { if (instanceBuilder_ != null) { return instanceBuilder_.getMessageOrBuilder(); } else { return instance_ == null ? com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.getDefaultInstance() : instance_; } } /** * * * <pre> * Required. Summarization verbosity instance. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.SummarizationVerbosityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstanceOrBuilder> getInstanceFieldBuilder() { if (instanceBuilder_ == null) { instanceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstance.Builder, com.google.cloud.aiplatform.v1.SummarizationVerbosityInstanceOrBuilder>( getInstance(), getParentForChildren(), isClean()); instance_ = null; } return instanceBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.SummarizationVerbosityInput) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.SummarizationVerbosityInput) private static final com.google.cloud.aiplatform.v1.SummarizationVerbosityInput DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.SummarizationVerbosityInput(); } public static com.google.cloud.aiplatform.v1.SummarizationVerbosityInput getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SummarizationVerbosityInput> PARSER = new com.google.protobuf.AbstractParser<SummarizationVerbosityInput>() { @java.lang.Override public SummarizationVerbosityInput parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SummarizationVerbosityInput> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SummarizationVerbosityInput> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.SummarizationVerbosityInput getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,526
java-discoveryengine/grpc-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/UserEventServiceGrpc.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.discoveryengine.v1;

import static io.grpc.MethodDescriptor.generateFullMethodName;

/**
 *
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 *
 * <p>NOTE(review): this class is machine-generated by the gRPC proto compiler from
 * {@code google/cloud/discoveryengine/v1/user_event_service.proto}. Do not hand-edit the
 * executable statements; regenerate from the proto instead.
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/cloud/discoveryengine/v1/user_event_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class UserEventServiceGrpc {

  // Not instantiable: this class is a static holder for method descriptors and stub factories.
  private UserEventServiceGrpc() {}

  public static final java.lang.String SERVICE_NAME =
      "google.cloud.discoveryengine.v1.UserEventService";

  // Static method descriptors that strictly reflect the proto.
  // Each descriptor field below is lazily created on first access via double-checked locking
  // (volatile field + synchronized re-check on UserEventServiceGrpc.class), so descriptor
  // construction cost is paid at most once per process.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.WriteUserEventRequest,
          com.google.cloud.discoveryengine.v1.UserEvent>
      getWriteUserEventMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "WriteUserEvent",
      requestType = com.google.cloud.discoveryengine.v1.WriteUserEventRequest.class,
      responseType = com.google.cloud.discoveryengine.v1.UserEvent.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.WriteUserEventRequest,
          com.google.cloud.discoveryengine.v1.UserEvent>
      getWriteUserEventMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.WriteUserEventRequest,
            com.google.cloud.discoveryengine.v1.UserEvent>
        getWriteUserEventMethod;
    if ((getWriteUserEventMethod = UserEventServiceGrpc.getWriteUserEventMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getWriteUserEventMethod = UserEventServiceGrpc.getWriteUserEventMethod) == null) {
          UserEventServiceGrpc.getWriteUserEventMethod =
              getWriteUserEventMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.WriteUserEventRequest,
                          com.google.cloud.discoveryengine.v1.UserEvent>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "WriteUserEvent"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.WriteUserEventRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.UserEvent.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("WriteUserEvent"))
                      .build();
        }
      }
    }
    return getWriteUserEventMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.CollectUserEventRequest, com.google.api.HttpBody>
      getCollectUserEventMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CollectUserEvent",
      requestType = com.google.cloud.discoveryengine.v1.CollectUserEventRequest.class,
      responseType = com.google.api.HttpBody.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.CollectUserEventRequest, com.google.api.HttpBody>
      getCollectUserEventMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.CollectUserEventRequest, com.google.api.HttpBody>
        getCollectUserEventMethod;
    if ((getCollectUserEventMethod = UserEventServiceGrpc.getCollectUserEventMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getCollectUserEventMethod = UserEventServiceGrpc.getCollectUserEventMethod) == null) {
          UserEventServiceGrpc.getCollectUserEventMethod =
              getCollectUserEventMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.CollectUserEventRequest,
                          com.google.api.HttpBody>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CollectUserEvent"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.CollectUserEventRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.api.HttpBody.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("CollectUserEvent"))
                      .build();
        }
      }
    }
    return getCollectUserEventMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest,
          com.google.longrunning.Operation>
      getPurgeUserEventsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "PurgeUserEvents",
      requestType = com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest,
          com.google.longrunning.Operation>
      getPurgeUserEventsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest,
            com.google.longrunning.Operation>
        getPurgeUserEventsMethod;
    if ((getPurgeUserEventsMethod = UserEventServiceGrpc.getPurgeUserEventsMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getPurgeUserEventsMethod = UserEventServiceGrpc.getPurgeUserEventsMethod) == null) {
          UserEventServiceGrpc.getPurgeUserEventsMethod =
              getPurgeUserEventsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "PurgeUserEvents"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("PurgeUserEvents"))
                      .build();
        }
      }
    }
    return getPurgeUserEventsMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.ImportUserEventsRequest,
          com.google.longrunning.Operation>
      getImportUserEventsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ImportUserEvents",
      requestType = com.google.cloud.discoveryengine.v1.ImportUserEventsRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.ImportUserEventsRequest,
          com.google.longrunning.Operation>
      getImportUserEventsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.ImportUserEventsRequest,
            com.google.longrunning.Operation>
        getImportUserEventsMethod;
    if ((getImportUserEventsMethod = UserEventServiceGrpc.getImportUserEventsMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getImportUserEventsMethod = UserEventServiceGrpc.getImportUserEventsMethod) == null) {
          UserEventServiceGrpc.getImportUserEventsMethod =
              getImportUserEventsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.ImportUserEventsRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ImportUserEvents"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.ImportUserEventsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("ImportUserEvents"))
                      .build();
        }
      }
    }
    return getImportUserEventsMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static UserEventServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<UserEventServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<UserEventServiceStub>() {
          @java.lang.Override
          public UserEventServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new UserEventServiceStub(channel, callOptions);
          }
        };
    return UserEventServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static UserEventServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingV2Stub>() {
          @java.lang.Override
          public UserEventServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new UserEventServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return UserEventServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static UserEventServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingStub>() {
          @java.lang.Override
          public UserEventServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new UserEventServiceBlockingStub(channel, callOptions);
          }
        };
    return UserEventServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static UserEventServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<UserEventServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<UserEventServiceFutureStub>() {
          @java.lang.Override
          public UserEventServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new UserEventServiceFutureStub(channel, callOptions);
          }
        };
    return UserEventServiceFutureStub.newStub(factory, channel);
  }

  /**
   *
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   *
   * <p>Server-side contract. Each method has a default implementation that responds
   * UNIMPLEMENTED, so implementations only need to override the RPCs they support.
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Writes a single user event.
     * </pre>
     */
    default void writeUserEvent(
        com.google.cloud.discoveryengine.v1.WriteUserEventRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.UserEvent>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getWriteUserEventMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event from the browser. This uses a GET request to
     * due to browser restriction of POST-ing to a third-party domain.
     * This method is used only by the Discovery Engine API JavaScript pixel and
     * Google Tag Manager. Users should not call this method directly.
     * </pre>
     */
    default void collectUserEvent(
        com.google.cloud.discoveryengine.v1.CollectUserEventRequest request,
        io.grpc.stub.StreamObserver<com.google.api.HttpBody> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCollectUserEventMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes permanently all user events specified by the filter provided.
     * Depending on the number of events specified by the filter, this operation
     * could take hours or days to complete. To test a filter, use the list
     * command first.
     * </pre>
     */
    default void purgeUserEvents(
        com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getPurgeUserEventsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Bulk import of user events. Request processing might be
     * synchronous. Events that already exist are skipped.
     * Use this method for backfilling historical user events.
     * Operation.response is of type ImportResponse. Note that it is
     * possible for a subset of the items to be successfully inserted.
     * Operation.metadata is of type ImportMetadata.
     * </pre>
     */
    default void importUserEvents(
        com.google.cloud.discoveryengine.v1.ImportUserEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getImportUserEventsMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service UserEventService.
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   */
  public abstract static class UserEventServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return UserEventServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service UserEventService.
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   */
  public static final class UserEventServiceStub
      extends io.grpc.stub.AbstractAsyncStub<UserEventServiceStub> {
    private UserEventServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected UserEventServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new UserEventServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event.
     * </pre>
     */
    public void writeUserEvent(
        com.google.cloud.discoveryengine.v1.WriteUserEventRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.UserEvent>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getWriteUserEventMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event from the browser. This uses a GET request to
     * due to browser restriction of POST-ing to a third-party domain.
     * This method is used only by the Discovery Engine API JavaScript pixel and
     * Google Tag Manager. Users should not call this method directly.
     * </pre>
     */
    public void collectUserEvent(
        com.google.cloud.discoveryengine.v1.CollectUserEventRequest request,
        io.grpc.stub.StreamObserver<com.google.api.HttpBody> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCollectUserEventMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes permanently all user events specified by the filter provided.
     * Depending on the number of events specified by the filter, this operation
     * could take hours or days to complete. To test a filter, use the list
     * command first.
     * </pre>
     */
    public void purgeUserEvents(
        com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getPurgeUserEventsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Bulk import of user events. Request processing might be
     * synchronous. Events that already exist are skipped.
     * Use this method for backfilling historical user events.
     * Operation.response is of type ImportResponse. Note that it is
     * possible for a subset of the items to be successfully inserted.
     * Operation.metadata is of type ImportMetadata.
     * </pre>
     */
    public void importUserEvents(
        com.google.cloud.discoveryengine.v1.ImportUserEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getImportUserEventsMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service UserEventService.
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   */
  public static final class UserEventServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<UserEventServiceBlockingV2Stub> {
    private UserEventServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected UserEventServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new UserEventServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.UserEvent writeUserEvent(
        com.google.cloud.discoveryengine.v1.WriteUserEventRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getWriteUserEventMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event from the browser. This uses a GET request to
     * due to browser restriction of POST-ing to a third-party domain.
     * This method is used only by the Discovery Engine API JavaScript pixel and
     * Google Tag Manager. Users should not call this method directly.
     * </pre>
     */
    public com.google.api.HttpBody collectUserEvent(
        com.google.cloud.discoveryengine.v1.CollectUserEventRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCollectUserEventMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes permanently all user events specified by the filter provided.
     * Depending on the number of events specified by the filter, this operation
     * could take hours or days to complete. To test a filter, use the list
     * command first.
     * </pre>
     */
    public com.google.longrunning.Operation purgeUserEvents(
        com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPurgeUserEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Bulk import of user events. Request processing might be
     * synchronous. Events that already exist are skipped.
     * Use this method for backfilling historical user events.
     * Operation.response is of type ImportResponse. Note that it is
     * possible for a subset of the items to be successfully inserted.
     * Operation.metadata is of type ImportMetadata.
     * </pre>
     */
    public com.google.longrunning.Operation importUserEvents(
        com.google.cloud.discoveryengine.v1.ImportUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getImportUserEventsMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service UserEventService.
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   */
  public static final class UserEventServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<UserEventServiceBlockingStub> {
    private UserEventServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected UserEventServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new UserEventServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.UserEvent writeUserEvent(
        com.google.cloud.discoveryengine.v1.WriteUserEventRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getWriteUserEventMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event from the browser. This uses a GET request to
     * due to browser restriction of POST-ing to a third-party domain.
     * This method is used only by the Discovery Engine API JavaScript pixel and
     * Google Tag Manager. Users should not call this method directly.
     * </pre>
     */
    public com.google.api.HttpBody collectUserEvent(
        com.google.cloud.discoveryengine.v1.CollectUserEventRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCollectUserEventMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes permanently all user events specified by the filter provided.
     * Depending on the number of events specified by the filter, this operation
     * could take hours or days to complete. To test a filter, use the list
     * command first.
     * </pre>
     */
    public com.google.longrunning.Operation purgeUserEvents(
        com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPurgeUserEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Bulk import of user events. Request processing might be
     * synchronous. Events that already exist are skipped.
     * Use this method for backfilling historical user events.
     * Operation.response is of type ImportResponse. Note that it is
     * possible for a subset of the items to be successfully inserted.
     * Operation.metadata is of type ImportMetadata.
     * </pre>
     */
    public com.google.longrunning.Operation importUserEvents(
        com.google.cloud.discoveryengine.v1.ImportUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getImportUserEventsMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service UserEventService.
   *
   * <pre>
   * Service for ingesting end user actions on a website to Discovery Engine API.
   * </pre>
   */
  public static final class UserEventServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<UserEventServiceFutureStub> {
    private UserEventServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected UserEventServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new UserEventServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1.UserEvent>
        writeUserEvent(com.google.cloud.discoveryengine.v1.WriteUserEventRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getWriteUserEventMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Writes a single user event from the browser. This uses a GET request to
     * due to browser restriction of POST-ing to a third-party domain.
     * This method is used only by the Discovery Engine API JavaScript pixel and
     * Google Tag Manager. Users should not call this method directly.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.api.HttpBody>
        collectUserEvent(com.google.cloud.discoveryengine.v1.CollectUserEventRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCollectUserEventMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes permanently all user events specified by the filter provided.
     * Depending on the number of events specified by the filter, this operation
     * could take hours or days to complete. To test a filter, use the list
     * command first.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        purgeUserEvents(com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getPurgeUserEventsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Bulk import of user events. Request processing might be
     * synchronous. Events that already exist are skipped.
     * Use this method for backfilling historical user events.
     * Operation.response is of type ImportResponse. Note that it is
     * possible for a subset of the items to be successfully inserted.
     * Operation.metadata is of type ImportMetadata.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        importUserEvents(com.google.cloud.discoveryengine.v1.ImportUserEventsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getImportUserEventsMethod(), getCallOptions()), request);
    }
  }

  // Integer ids used by MethodHandlers to dispatch an incoming call to the right
  // AsyncService method without per-call reflection.
  private static final int METHODID_WRITE_USER_EVENT = 0;
  private static final int METHODID_COLLECT_USER_EVENT = 1;
  private static final int METHODID_PURGE_USER_EVENTS = 2;
  private static final int METHODID_IMPORT_USER_EVENTS = 3;

  // Single handler class shared by all RPCs of the service; the methodId selects the
  // target method. Only the unary invoke() path is reachable here because every RPC in
  // this service is UNARY; the streaming invoke() overload is intentionally unreachable.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_WRITE_USER_EVENT:
          serviceImpl.writeUserEvent(
              (com.google.cloud.discoveryengine.v1.WriteUserEventRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.UserEvent>)
                  responseObserver);
          break;
        case METHODID_COLLECT_USER_EVENT:
          serviceImpl.collectUserEvent(
              (com.google.cloud.discoveryengine.v1.CollectUserEventRequest) request,
              (io.grpc.stub.StreamObserver<com.google.api.HttpBody>) responseObserver);
          break;
        case METHODID_PURGE_USER_EVENTS:
          serviceImpl.purgeUserEvents(
              (com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_IMPORT_USER_EVENTS:
          serviceImpl.importUserEvents(
              (com.google.cloud.discoveryengine.v1.ImportUserEventsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  // Binds each proto method descriptor to a MethodHandlers instance for the given service
  // implementation; used by UserEventServiceImplBase.bindService().
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getWriteUserEventMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.WriteUserEventRequest,
                    com.google.cloud.discoveryengine.v1.UserEvent>(
                    service, METHODID_WRITE_USER_EVENT)))
        .addMethod(
            getCollectUserEventMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.CollectUserEventRequest,
                    com.google.api.HttpBody>(service, METHODID_COLLECT_USER_EVENT)))
        .addMethod(
            getPurgeUserEventsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.PurgeUserEventsRequest,
                    com.google.longrunning.Operation>(service, METHODID_PURGE_USER_EVENTS)))
        .addMethod(
            getImportUserEventsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.ImportUserEventsRequest,
                    com.google.longrunning.Operation>(service, METHODID_IMPORT_USER_EVENTS)))
        .build();
  }

  // Bridges gRPC reflection/schema queries to the protobuf descriptors generated from the
  // source .proto file.
  private abstract static class UserEventServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    UserEventServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.discoveryengine.v1.UserEventServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("UserEventService");
    }
  }

  private static final class UserEventServiceFileDescriptorSupplier
      extends UserEventServiceBaseDescriptorSupplier {
    UserEventServiceFileDescriptorSupplier() {}
  }

  private static final class UserEventServiceMethodDescriptorSupplier
      extends UserEventServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    UserEventServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the aggregate service descriptor (all four unary methods) using the
  // same double-checked-locking pattern as the per-method descriptor getters above.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (UserEventServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new UserEventServiceFileDescriptorSupplier())
                      .addMethod(getWriteUserEventMethod())
                      .addMethod(getCollectUserEventMethod())
                      .addMethod(getPurgeUserEventsMethod())
                      .addMethod(getImportUserEventsMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
googleapis/google-cloud-java
35,221
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/IntentSuggestion.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/participant.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * Represents an intent suggestion. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.IntentSuggestion} */ public final class IntentSuggestion extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.IntentSuggestion) IntentSuggestionOrBuilder { private static final long serialVersionUID = 0L; // Use IntentSuggestion.newBuilder() to construct. 
private IntentSuggestion(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private IntentSuggestion() { displayName_ = ""; description_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new IntentSuggestion(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_IntentSuggestion_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_IntentSuggestion_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.IntentSuggestion.class, com.google.cloud.dialogflow.v2beta1.IntentSuggestion.Builder.class); } private int intentCase_ = 0; @SuppressWarnings("serial") private java.lang.Object intent_; public enum IntentCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { INTENT_V2(2), INTENT_NOT_SET(0); private final int value; private IntentCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static IntentCase valueOf(int value) { return forNumber(value); } public static IntentCase forNumber(int value) { switch (value) { case 2: return INTENT_V2; case 0: return INTENT_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public IntentCase getIntentCase() { return IntentCase.forNumber(intentCase_); } public static final int DISPLAY_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object displayName_ = ""; /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @return The displayName. */ @java.lang.Override public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } } /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @return The bytes for displayName. */ @java.lang.Override public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INTENT_V2_FIELD_NUMBER = 2; /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return Whether the intentV2 field is set. */ public boolean hasIntentV2() { return intentCase_ == 2; } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. 
Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return The intentV2. */ public java.lang.String getIntentV2() { java.lang.Object ref = ""; if (intentCase_ == 2) { ref = intent_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (intentCase_ == 2) { intent_ = s; } return s; } } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return The bytes for intentV2. */ public com.google.protobuf.ByteString getIntentV2Bytes() { java.lang.Object ref = ""; if (intentCase_ == 2) { ref = intent_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (intentCase_ == 2) { intent_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int DESCRIPTION_FIELD_NUMBER = 5; @SuppressWarnings("serial") private volatile java.lang.Object description_ = ""; /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @return The description. 
*/ @java.lang.Override public java.lang.String getDescription() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } } /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @return The bytes for description. */ @java.lang.Override public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, displayName_); } if (intentCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, intent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, description_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, displayName_); } if (intentCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, intent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(description_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, description_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.IntentSuggestion)) { return super.equals(obj); } com.google.cloud.dialogflow.v2beta1.IntentSuggestion other = (com.google.cloud.dialogflow.v2beta1.IntentSuggestion) obj; if (!getDisplayName().equals(other.getDisplayName())) return false; if (!getDescription().equals(other.getDescription())) return false; if (!getIntentCase().equals(other.getIntentCase())) return false; switch (intentCase_) { case 2: if (!getIntentV2().equals(other.getIntentV2())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER; hash = (53 * hash) + getDisplayName().hashCode(); hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER; hash = (53 * hash) + getDescription().hashCode(); switch (intentCase_) { case 2: hash = (37 * hash) + INTENT_V2_FIELD_NUMBER; hash = (53 * hash) + getIntentV2().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static 
com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2beta1.IntentSuggestion prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Represents an intent suggestion. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.IntentSuggestion} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.IntentSuggestion) com.google.cloud.dialogflow.v2beta1.IntentSuggestionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_IntentSuggestion_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_IntentSuggestion_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.IntentSuggestion.class, com.google.cloud.dialogflow.v2beta1.IntentSuggestion.Builder.class); } // Construct using com.google.cloud.dialogflow.v2beta1.IntentSuggestion.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; displayName_ = ""; description_ = ""; intentCase_ = 0; intent_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.ParticipantProto .internal_static_google_cloud_dialogflow_v2beta1_IntentSuggestion_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.IntentSuggestion getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2beta1.IntentSuggestion.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.IntentSuggestion build() { com.google.cloud.dialogflow.v2beta1.IntentSuggestion result = buildPartial(); if (!result.isInitialized()) 
{ throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.IntentSuggestion buildPartial() { com.google.cloud.dialogflow.v2beta1.IntentSuggestion result = new com.google.cloud.dialogflow.v2beta1.IntentSuggestion(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.v2beta1.IntentSuggestion result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.displayName_ = displayName_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.description_ = description_; } } private void buildPartialOneofs(com.google.cloud.dialogflow.v2beta1.IntentSuggestion result) { result.intentCase_ = intentCase_; result.intent_ = this.intent_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2beta1.IntentSuggestion) { return mergeFrom((com.google.cloud.dialogflow.v2beta1.IntentSuggestion) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.IntentSuggestion other) { if (other == com.google.cloud.dialogflow.v2beta1.IntentSuggestion.getDefaultInstance()) return this; if (!other.getDisplayName().isEmpty()) { displayName_ = other.displayName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getDescription().isEmpty()) { description_ = other.description_; bitField0_ |= 0x00000004; onChanged(); } switch (other.getIntentCase()) { case INTENT_V2: { intentCase_ = 2; intent_ = other.intent_; onChanged(); break; } case INTENT_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { displayName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); intentCase_ = 2; intent_ = s; break; } // case 18 case 42: { description_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 42 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int intentCase_ = 0; private java.lang.Object intent_; public IntentCase getIntentCase() { return IntentCase.forNumber(intentCase_); } public Builder clearIntent() { intentCase_ = 0; intent_ = null; onChanged(); 
return this; } private int bitField0_; private java.lang.Object displayName_ = ""; /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @return The displayName. */ public java.lang.String getDisplayName() { java.lang.Object ref = displayName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); displayName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @return The bytes for displayName. */ public com.google.protobuf.ByteString getDisplayNameBytes() { java.lang.Object ref = displayName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); displayName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @param value The displayName to set. * @return This builder for chaining. */ public Builder setDisplayName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } displayName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @return This builder for chaining. */ public Builder clearDisplayName() { displayName_ = getDefaultInstance().getDisplayName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The display name of the intent. * </pre> * * <code>string display_name = 1;</code> * * @param value The bytes for displayName to set. * @return This builder for chaining. 
*/ public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); displayName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return Whether the intentV2 field is set. */ @java.lang.Override public boolean hasIntentV2() { return intentCase_ == 2; } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return The intentV2. */ @java.lang.Override public java.lang.String getIntentV2() { java.lang.Object ref = ""; if (intentCase_ == 2) { ref = intent_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (intentCase_ == 2) { intent_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return The bytes for intentV2. 
*/ @java.lang.Override public com.google.protobuf.ByteString getIntentV2Bytes() { java.lang.Object ref = ""; if (intentCase_ == 2) { ref = intent_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (intentCase_ == 2) { intent_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @param value The intentV2 to set. * @return This builder for chaining. */ public Builder setIntentV2(java.lang.String value) { if (value == null) { throw new NullPointerException(); } intentCase_ = 2; intent_ = value; onChanged(); return this; } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @return This builder for chaining. */ public Builder clearIntentV2() { if (intentCase_ == 2) { intentCase_ = 0; intent_ = null; onChanged(); } return this; } /** * * * <pre> * The unique identifier of this * [intent][google.cloud.dialogflow.v2beta1.Intent]. Format: * `projects/&lt;Project ID&gt;/locations/&lt;Location ID&gt;/agent/intents/&lt;Intent * ID&gt;`. * </pre> * * <code>string intent_v2 = 2;</code> * * @param value The bytes for intentV2 to set. * @return This builder for chaining. 
*/ public Builder setIntentV2Bytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); intentCase_ = 2; intent_ = value; onChanged(); return this; } private java.lang.Object description_ = ""; /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @return The description. */ public java.lang.String getDescription() { java.lang.Object ref = description_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); description_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @return The bytes for description. */ public com.google.protobuf.ByteString getDescriptionBytes() { java.lang.Object ref = description_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); description_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @param value The description to set. * @return This builder for chaining. */ public Builder setDescription(java.lang.String value) { if (value == null) { throw new NullPointerException(); } description_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. 
Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @return This builder for chaining. */ public Builder clearDescription() { description_ = getDefaultInstance().getDescription(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Human readable description for better understanding an intent like its * scope, content, result etc. Maximum character limit: 140 characters. * </pre> * * <code>string description = 5;</code> * * @param value The bytes for description to set. * @return This builder for chaining. */ public Builder setDescriptionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); description_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.IntentSuggestion) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.IntentSuggestion) private static final com.google.cloud.dialogflow.v2beta1.IntentSuggestion DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.IntentSuggestion(); } public static com.google.cloud.dialogflow.v2beta1.IntentSuggestion getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<IntentSuggestion> PARSER = new com.google.protobuf.AbstractParser<IntentSuggestion>() { @java.lang.Override public IntentSuggestion parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<IntentSuggestion> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<IntentSuggestion> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.IntentSuggestion getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,374
java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/ClearSuggestionFeatureConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/conversation_profile.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * The request message for [ConversationProfiles.ClearFeature][]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest} */ public final class ClearSuggestionFeatureConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) ClearSuggestionFeatureConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ClearSuggestionFeatureConfigRequest.newBuilder() to construct. 
private ClearSuggestionFeatureConfigRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ClearSuggestionFeatureConfigRequest() { conversationProfile_ = ""; participantRole_ = 0; suggestionFeatureType_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ClearSuggestionFeatureConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto .internal_static_google_cloud_dialogflow_v2beta1_ClearSuggestionFeatureConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto .internal_static_google_cloud_dialogflow_v2beta1_ClearSuggestionFeatureConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest.class, com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest.Builder.class); } public static final int CONVERSATION_PROFILE_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object conversationProfile_ = ""; /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The conversationProfile. 
*/ @java.lang.Override public java.lang.String getConversationProfile() { java.lang.Object ref = conversationProfile_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); conversationProfile_ = s; return s; } } /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for conversationProfile. */ @java.lang.Override public com.google.protobuf.ByteString getConversationProfileBytes() { java.lang.Object ref = conversationProfile_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); conversationProfile_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARTICIPANT_ROLE_FIELD_NUMBER = 2; private int participantRole_ = 0; /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for participantRole. */ @java.lang.Override public int getParticipantRoleValue() { return participantRole_; } /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The participantRole. 
*/ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.Participant.Role getParticipantRole() { com.google.cloud.dialogflow.v2beta1.Participant.Role result = com.google.cloud.dialogflow.v2beta1.Participant.Role.forNumber(participantRole_); return result == null ? com.google.cloud.dialogflow.v2beta1.Participant.Role.UNRECOGNIZED : result; } public static final int SUGGESTION_FEATURE_TYPE_FIELD_NUMBER = 3; private int suggestionFeatureType_ = 0; /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for suggestionFeatureType. */ @java.lang.Override public int getSuggestionFeatureTypeValue() { return suggestionFeatureType_; } /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The suggestionFeatureType. */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type getSuggestionFeatureType() { com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type result = com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.forNumber( suggestionFeatureType_); return result == null ? 
com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(conversationProfile_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, conversationProfile_); } if (participantRole_ != com.google.cloud.dialogflow.v2beta1.Participant.Role.ROLE_UNSPECIFIED.getNumber()) { output.writeEnum(2, participantRole_); } if (suggestionFeatureType_ != com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.TYPE_UNSPECIFIED .getNumber()) { output.writeEnum(3, suggestionFeatureType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(conversationProfile_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, conversationProfile_); } if (participantRole_ != com.google.cloud.dialogflow.v2beta1.Participant.Role.ROLE_UNSPECIFIED.getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, participantRole_); } if (suggestionFeatureType_ != com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.TYPE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, suggestionFeatureType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest)) { return 
super.equals(obj); } com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest other = (com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) obj; if (!getConversationProfile().equals(other.getConversationProfile())) return false; if (participantRole_ != other.participantRole_) return false; if (suggestionFeatureType_ != other.suggestionFeatureType_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CONVERSATION_PROFILE_FIELD_NUMBER; hash = (53 * hash) + getConversationProfile().hashCode(); hash = (37 * hash) + PARTICIPANT_ROLE_FIELD_NUMBER; hash = (53 * hash) + participantRole_; hash = (37 * hash) + SUGGESTION_FEATURE_TYPE_FIELD_NUMBER; hash = (53 * hash) + suggestionFeatureType_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for [ConversationProfiles.ClearFeature][]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto .internal_static_google_cloud_dialogflow_v2beta1_ClearSuggestionFeatureConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto .internal_static_google_cloud_dialogflow_v2beta1_ClearSuggestionFeatureConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest.class, com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest.Builder .class); } // Construct using // com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; conversationProfile_ = ""; participantRole_ = 0; suggestionFeatureType_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.ConversationProfileProto .internal_static_google_cloud_dialogflow_v2beta1_ClearSuggestionFeatureConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest getDefaultInstanceForType() { return 
com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest build() { com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest buildPartial() { com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest result = new com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.conversationProfile_ = conversationProfile_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.participantRole_ = participantRole_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.suggestionFeatureType_ = suggestionFeatureType_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) { return mergeFrom( (com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest other) { if (other == com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest .getDefaultInstance()) return this; if (!other.getConversationProfile().isEmpty()) { conversationProfile_ = other.conversationProfile_; bitField0_ |= 0x00000001; onChanged(); } if (other.participantRole_ != 0) { setParticipantRoleValue(other.getParticipantRoleValue()); } if (other.suggestionFeatureType_ != 0) { setSuggestionFeatureTypeValue(other.getSuggestionFeatureTypeValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { conversationProfile_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { participantRole_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 24: { suggestionFeatureType_ = input.readEnum(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } 
// switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object conversationProfile_ = ""; /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The conversationProfile. */ public java.lang.String getConversationProfile() { java.lang.Object ref = conversationProfile_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); conversationProfile_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for conversationProfile. */ public com.google.protobuf.ByteString getConversationProfileBytes() { java.lang.Object ref = conversationProfile_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); conversationProfile_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. 
* </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The conversationProfile to set. * @return This builder for chaining. */ public Builder setConversationProfile(java.lang.String value) { if (value == null) { throw new NullPointerException(); } conversationProfile_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearConversationProfile() { conversationProfile_ = getDefaultInstance().getConversationProfile(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The Conversation Profile to add or update the suggestion feature * config. Format: `projects/&lt;Project ID&gt;/locations/&lt;Location * ID&gt;/conversationProfiles/&lt;Conversation Profile ID&gt;`. * </pre> * * <code>string conversation_profile = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for conversationProfile to set. * @return This builder for chaining. */ public Builder setConversationProfileBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); conversationProfile_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int participantRole_ = 0; /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. 
* </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for participantRole. */ @java.lang.Override public int getParticipantRoleValue() { return participantRole_; } /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for participantRole to set. * @return This builder for chaining. */ public Builder setParticipantRoleValue(int value) { participantRole_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The participantRole. */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.Participant.Role getParticipantRole() { com.google.cloud.dialogflow.v2beta1.Participant.Role result = com.google.cloud.dialogflow.v2beta1.Participant.Role.forNumber(participantRole_); return result == null ? com.google.cloud.dialogflow.v2beta1.Participant.Role.UNRECOGNIZED : result; } /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The participantRole to set. * @return This builder for chaining. 
*/ public Builder setParticipantRole(com.google.cloud.dialogflow.v2beta1.Participant.Role value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; participantRole_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The participant role to remove the suggestion feature * config. Only HUMAN_AGENT or END_USER can be used. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.Participant.Role participant_role = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearParticipantRole() { bitField0_ = (bitField0_ & ~0x00000002); participantRole_ = 0; onChanged(); return this; } private int suggestionFeatureType_ = 0; /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for suggestionFeatureType. */ @java.lang.Override public int getSuggestionFeatureTypeValue() { return suggestionFeatureType_; } /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for suggestionFeatureType to set. * @return This builder for chaining. */ public Builder setSuggestionFeatureTypeValue(int value) { suggestionFeatureType_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The suggestionFeatureType. 
*/ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type getSuggestionFeatureType() { com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type result = com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.forNumber( suggestionFeatureType_); return result == null ? com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type.UNRECOGNIZED : result; } /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The suggestionFeatureType to set. * @return This builder for chaining. */ public Builder setSuggestionFeatureType( com.google.cloud.dialogflow.v2beta1.SuggestionFeature.Type value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; suggestionFeatureType_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The type of the suggestion feature to remove. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.SuggestionFeature.Type suggestion_feature_type = 3 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. 
*/ public Builder clearSuggestionFeatureType() { bitField0_ = (bitField0_ & ~0x00000004); suggestionFeatureType_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest) private static final com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest(); } public static com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ClearSuggestionFeatureConfigRequest> PARSER = new com.google.protobuf.AbstractParser<ClearSuggestionFeatureConfigRequest>() { @java.lang.Override public ClearSuggestionFeatureConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ClearSuggestionFeatureConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ClearSuggestionFeatureConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.ClearSuggestionFeatureConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,249
java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/ReviewDocumentResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1; /** * * * <pre> * Response message for the * [ReviewDocument][google.cloud.documentai.v1.DocumentProcessorService.ReviewDocument] * method. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ReviewDocumentResponse} */ public final class ReviewDocumentResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.ReviewDocumentResponse) ReviewDocumentResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ReviewDocumentResponse.newBuilder() to construct. 
private ReviewDocumentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ReviewDocumentResponse() { gcsDestination_ = ""; state_ = 0; rejectionReason_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ReviewDocumentResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ReviewDocumentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ReviewDocumentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ReviewDocumentResponse.class, com.google.cloud.documentai.v1.ReviewDocumentResponse.Builder.class); } /** * * * <pre> * Possible states of the review operation. * </pre> * * Protobuf enum {@code google.cloud.documentai.v1.ReviewDocumentResponse.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * The default value. This value is used if the state is omitted. * </pre> * * <code>STATE_UNSPECIFIED = 0;</code> */ STATE_UNSPECIFIED(0), /** * * * <pre> * The review operation is rejected by the reviewer. * </pre> * * <code>REJECTED = 1;</code> */ REJECTED(1), /** * * * <pre> * The review operation is succeeded. * </pre> * * <code>SUCCEEDED = 2;</code> */ SUCCEEDED(2), UNRECOGNIZED(-1), ; /** * * * <pre> * The default value. This value is used if the state is omitted. * </pre> * * <code>STATE_UNSPECIFIED = 0;</code> */ public static final int STATE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * The review operation is rejected by the reviewer. 
* </pre> * * <code>REJECTED = 1;</code> */ public static final int REJECTED_VALUE = 1; /** * * * <pre> * The review operation is succeeded. * </pre> * * <code>SUCCEEDED = 2;</code> */ public static final int SUCCEEDED_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static State valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. */ public static State forNumber(int value) { switch (value) { case 0: return STATE_UNSPECIFIED; case 1: return REJECTED; case 2: return SUCCEEDED; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.documentai.v1.ReviewDocumentResponse.getDescriptor() .getEnumTypes() .get(0); } private static final State[] VALUES = values(); public static State 
valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private State(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.documentai.v1.ReviewDocumentResponse.State) } public static final int GCS_DESTINATION_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object gcsDestination_ = ""; /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @return The gcsDestination. */ @java.lang.Override public java.lang.String getGcsDestination() { java.lang.Object ref = gcsDestination_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gcsDestination_ = s; return s; } } /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @return The bytes for gcsDestination. */ @java.lang.Override public com.google.protobuf.ByteString getGcsDestinationBytes() { java.lang.Object ref = gcsDestination_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gcsDestination_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int STATE_FIELD_NUMBER = 2; private int state_ = 0; /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @return The enum numeric value on the wire for state. 
*/ @java.lang.Override public int getStateValue() { return state_; } /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @return The state. */ @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse.State getState() { com.google.cloud.documentai.v1.ReviewDocumentResponse.State result = com.google.cloud.documentai.v1.ReviewDocumentResponse.State.forNumber(state_); return result == null ? com.google.cloud.documentai.v1.ReviewDocumentResponse.State.UNRECOGNIZED : result; } public static final int REJECTION_REASON_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object rejectionReason_ = ""; /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @return The rejectionReason. */ @java.lang.Override public java.lang.String getRejectionReason() { java.lang.Object ref = rejectionReason_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rejectionReason_ = s; return s; } } /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @return The bytes for rejectionReason. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRejectionReasonBytes() { java.lang.Object ref = rejectionReason_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rejectionReason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsDestination_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, gcsDestination_); } if (state_ != com.google.cloud.documentai.v1.ReviewDocumentResponse.State.STATE_UNSPECIFIED .getNumber()) { output.writeEnum(2, state_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rejectionReason_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rejectionReason_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsDestination_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, gcsDestination_); } if (state_ != com.google.cloud.documentai.v1.ReviewDocumentResponse.State.STATE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, state_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rejectionReason_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rejectionReason_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean 
equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1.ReviewDocumentResponse)) { return super.equals(obj); } com.google.cloud.documentai.v1.ReviewDocumentResponse other = (com.google.cloud.documentai.v1.ReviewDocumentResponse) obj; if (!getGcsDestination().equals(other.getGcsDestination())) return false; if (state_ != other.state_) return false; if (!getRejectionReason().equals(other.getRejectionReason())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER; hash = (53 * hash) + getGcsDestination().hashCode(); hash = (37 * hash) + STATE_FIELD_NUMBER; hash = (53 * hash) + state_; hash = (37 * hash) + REJECTION_REASON_FIELD_NUMBER; hash = (53 * hash) + getRejectionReason().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1.ReviewDocumentResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for the * [ReviewDocument][google.cloud.documentai.v1.DocumentProcessorService.ReviewDocument] * method. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.ReviewDocumentResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.ReviewDocumentResponse) com.google.cloud.documentai.v1.ReviewDocumentResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ReviewDocumentResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ReviewDocumentResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.ReviewDocumentResponse.class, com.google.cloud.documentai.v1.ReviewDocumentResponse.Builder.class); } // Construct 
using com.google.cloud.documentai.v1.ReviewDocumentResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; gcsDestination_ = ""; state_ = 0; rejectionReason_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1.DocumentAiProcessorService .internal_static_google_cloud_documentai_v1_ReviewDocumentResponse_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse getDefaultInstanceForType() { return com.google.cloud.documentai.v1.ReviewDocumentResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse build() { com.google.cloud.documentai.v1.ReviewDocumentResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse buildPartial() { com.google.cloud.documentai.v1.ReviewDocumentResponse result = new com.google.cloud.documentai.v1.ReviewDocumentResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.documentai.v1.ReviewDocumentResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.gcsDestination_ = gcsDestination_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.state_ = state_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.rejectionReason_ = rejectionReason_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1.ReviewDocumentResponse) { return mergeFrom((com.google.cloud.documentai.v1.ReviewDocumentResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1.ReviewDocumentResponse other) { if (other == com.google.cloud.documentai.v1.ReviewDocumentResponse.getDefaultInstance()) return this; if (!other.getGcsDestination().isEmpty()) { gcsDestination_ = other.gcsDestination_; bitField0_ |= 0x00000001; onChanged(); } if (other.state_ != 0) { setStateValue(other.getStateValue()); } if (!other.getRejectionReason().isEmpty()) { rejectionReason_ = other.rejectionReason_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { gcsDestination_ = 
input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { state_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { rejectionReason_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object gcsDestination_ = ""; /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @return The gcsDestination. */ public java.lang.String getGcsDestination() { java.lang.Object ref = gcsDestination_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); gcsDestination_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @return The bytes for gcsDestination. */ public com.google.protobuf.ByteString getGcsDestinationBytes() { java.lang.Object ref = gcsDestination_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); gcsDestination_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @param value The gcsDestination to set. * @return This builder for chaining. 
*/ public Builder setGcsDestination(java.lang.String value) { if (value == null) { throw new NullPointerException(); } gcsDestination_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @return This builder for chaining. */ public Builder clearGcsDestination() { gcsDestination_ = getDefaultInstance().getGcsDestination(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The Cloud Storage uri for the human reviewed document if the review is * succeeded. * </pre> * * <code>string gcs_destination = 1;</code> * * @param value The bytes for gcsDestination to set. * @return This builder for chaining. */ public Builder setGcsDestinationBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); gcsDestination_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int state_ = 0; /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @return The enum numeric value on the wire for state. */ @java.lang.Override public int getStateValue() { return state_; } /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @param value The enum numeric value on the wire for state to set. * @return This builder for chaining. */ public Builder setStateValue(int value) { state_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @return The state. 
*/ @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse.State getState() { com.google.cloud.documentai.v1.ReviewDocumentResponse.State result = com.google.cloud.documentai.v1.ReviewDocumentResponse.State.forNumber(state_); return result == null ? com.google.cloud.documentai.v1.ReviewDocumentResponse.State.UNRECOGNIZED : result; } /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @param value The state to set. * @return This builder for chaining. */ public Builder setState(com.google.cloud.documentai.v1.ReviewDocumentResponse.State value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; state_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * The state of the review operation. * </pre> * * <code>.google.cloud.documentai.v1.ReviewDocumentResponse.State state = 2;</code> * * @return This builder for chaining. */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); state_ = 0; onChanged(); return this; } private java.lang.Object rejectionReason_ = ""; /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @return The rejectionReason. */ public java.lang.String getRejectionReason() { java.lang.Object ref = rejectionReason_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rejectionReason_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @return The bytes for rejectionReason. 
*/ public com.google.protobuf.ByteString getRejectionReasonBytes() { java.lang.Object ref = rejectionReason_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rejectionReason_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @param value The rejectionReason to set. * @return This builder for chaining. */ public Builder setRejectionReason(java.lang.String value) { if (value == null) { throw new NullPointerException(); } rejectionReason_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @return This builder for chaining. */ public Builder clearRejectionReason() { rejectionReason_ = getDefaultInstance().getRejectionReason(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The reason why the review is rejected by reviewer. * </pre> * * <code>string rejection_reason = 3;</code> * * @param value The bytes for rejectionReason to set. * @return This builder for chaining. 
*/ public Builder setRejectionReasonBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); rejectionReason_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.ReviewDocumentResponse) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.ReviewDocumentResponse) private static final com.google.cloud.documentai.v1.ReviewDocumentResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.ReviewDocumentResponse(); } public static com.google.cloud.documentai.v1.ReviewDocumentResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ReviewDocumentResponse> PARSER = new com.google.protobuf.AbstractParser<ReviewDocumentResponse>() { @java.lang.Override public ReviewDocumentResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ReviewDocumentResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ReviewDocumentResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1.ReviewDocumentResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,278
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListActionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * List actions response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListActionsResponse} */ public final class ListActionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListActionsResponse) ListActionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListActionsResponse.newBuilder() to construct. 
private ListActionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListActionsResponse() { actions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListActionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListActionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListActionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListActionsResponse.class, com.google.cloud.dataplex.v1.ListActionsResponse.Builder.class); } public static final int ACTIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dataplex.v1.Action> actions_; /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dataplex.v1.Action> getActionsList() { return actions_; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dataplex.v1.ActionOrBuilder> getActionsOrBuilderList() { return actions_; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ @java.lang.Override public int getActionsCount() { return actions_.size(); } /** * * * <pre> * Actions under the given parent lake/zone/asset. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.Action getActions(int index) { return actions_.get(index); } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.ActionOrBuilder getActionsOrBuilder(int index) { return actions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < actions_.size(); i++) { output.writeMessage(1, actions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < actions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, actions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.ListActionsResponse)) { return super.equals(obj); } com.google.cloud.dataplex.v1.ListActionsResponse other = (com.google.cloud.dataplex.v1.ListActionsResponse) obj; if (!getActionsList().equals(other.getActionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getActionsCount() > 0) { hash = (37 * hash) + ACTIONS_FIELD_NUMBER; hash = (53 * hash) + getActionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.ListActionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.ListActionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * List actions response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListActionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListActionsResponse) com.google.cloud.dataplex.v1.ListActionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListActionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListActionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListActionsResponse.class, com.google.cloud.dataplex.v1.ListActionsResponse.Builder.class); } // Construct using com.google.cloud.dataplex.v1.ListActionsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (actionsBuilder_ == null) { actions_ = java.util.Collections.emptyList(); } else { actions_ = null; actionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListActionsResponse_descriptor; } @java.lang.Override public 
com.google.cloud.dataplex.v1.ListActionsResponse getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.ListActionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.ListActionsResponse build() { com.google.cloud.dataplex.v1.ListActionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.ListActionsResponse buildPartial() { com.google.cloud.dataplex.v1.ListActionsResponse result = new com.google.cloud.dataplex.v1.ListActionsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.dataplex.v1.ListActionsResponse result) { if (actionsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { actions_ = java.util.Collections.unmodifiableList(actions_); bitField0_ = (bitField0_ & ~0x00000001); } result.actions_ = actions_; } else { result.actions_ = actionsBuilder_.build(); } } private void buildPartial0(com.google.cloud.dataplex.v1.ListActionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, 
index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.ListActionsResponse) { return mergeFrom((com.google.cloud.dataplex.v1.ListActionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.ListActionsResponse other) { if (other == com.google.cloud.dataplex.v1.ListActionsResponse.getDefaultInstance()) return this; if (actionsBuilder_ == null) { if (!other.actions_.isEmpty()) { if (actions_.isEmpty()) { actions_ = other.actions_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureActionsIsMutable(); actions_.addAll(other.actions_); } onChanged(); } } else { if (!other.actions_.isEmpty()) { if (actionsBuilder_.isEmpty()) { actionsBuilder_.dispose(); actionsBuilder_ = null; actions_ = other.actions_; bitField0_ = (bitField0_ & ~0x00000001); actionsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getActionsFieldBuilder() : null; } else { actionsBuilder_.addAllMessages(other.actions_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.dataplex.v1.Action m = input.readMessage( com.google.cloud.dataplex.v1.Action.parser(), extensionRegistry); if (actionsBuilder_ == null) { ensureActionsIsMutable(); actions_.add(m); } else { actionsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.dataplex.v1.Action> actions_ = java.util.Collections.emptyList(); private void ensureActionsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { actions_ = new java.util.ArrayList<com.google.cloud.dataplex.v1.Action>(actions_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Action, com.google.cloud.dataplex.v1.Action.Builder, com.google.cloud.dataplex.v1.ActionOrBuilder> actionsBuilder_; /** * * * <pre> * Actions under the 
given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public java.util.List<com.google.cloud.dataplex.v1.Action> getActionsList() { if (actionsBuilder_ == null) { return java.util.Collections.unmodifiableList(actions_); } else { return actionsBuilder_.getMessageList(); } } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public int getActionsCount() { if (actionsBuilder_ == null) { return actions_.size(); } else { return actionsBuilder_.getCount(); } } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public com.google.cloud.dataplex.v1.Action getActions(int index) { if (actionsBuilder_ == null) { return actions_.get(index); } else { return actionsBuilder_.getMessage(index); } } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder setActions(int index, com.google.cloud.dataplex.v1.Action value) { if (actionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureActionsIsMutable(); actions_.set(index, value); onChanged(); } else { actionsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder setActions( int index, com.google.cloud.dataplex.v1.Action.Builder builderForValue) { if (actionsBuilder_ == null) { ensureActionsIsMutable(); actions_.set(index, builderForValue.build()); onChanged(); } else { actionsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder addActions(com.google.cloud.dataplex.v1.Action value) { if (actionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureActionsIsMutable(); actions_.add(value); onChanged(); } else { actionsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder addActions(int index, com.google.cloud.dataplex.v1.Action value) { if (actionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureActionsIsMutable(); actions_.add(index, value); onChanged(); } else { actionsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder addActions(com.google.cloud.dataplex.v1.Action.Builder builderForValue) { if (actionsBuilder_ == null) { ensureActionsIsMutable(); actions_.add(builderForValue.build()); onChanged(); } else { actionsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder addActions( int index, com.google.cloud.dataplex.v1.Action.Builder builderForValue) { if (actionsBuilder_ == null) { ensureActionsIsMutable(); actions_.add(index, builderForValue.build()); onChanged(); } else { actionsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder addAllActions( java.lang.Iterable<? 
extends com.google.cloud.dataplex.v1.Action> values) { if (actionsBuilder_ == null) { ensureActionsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, actions_); onChanged(); } else { actionsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder clearActions() { if (actionsBuilder_ == null) { actions_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { actionsBuilder_.clear(); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public Builder removeActions(int index) { if (actionsBuilder_ == null) { ensureActionsIsMutable(); actions_.remove(index); onChanged(); } else { actionsBuilder_.remove(index); } return this; } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public com.google.cloud.dataplex.v1.Action.Builder getActionsBuilder(int index) { return getActionsFieldBuilder().getBuilder(index); } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public com.google.cloud.dataplex.v1.ActionOrBuilder getActionsOrBuilder(int index) { if (actionsBuilder_ == null) { return actions_.get(index); } else { return actionsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public java.util.List<? 
extends com.google.cloud.dataplex.v1.ActionOrBuilder> getActionsOrBuilderList() { if (actionsBuilder_ != null) { return actionsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(actions_); } } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public com.google.cloud.dataplex.v1.Action.Builder addActionsBuilder() { return getActionsFieldBuilder() .addBuilder(com.google.cloud.dataplex.v1.Action.getDefaultInstance()); } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public com.google.cloud.dataplex.v1.Action.Builder addActionsBuilder(int index) { return getActionsFieldBuilder() .addBuilder(index, com.google.cloud.dataplex.v1.Action.getDefaultInstance()); } /** * * * <pre> * Actions under the given parent lake/zone/asset. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Action actions = 1;</code> */ public java.util.List<com.google.cloud.dataplex.v1.Action.Builder> getActionsBuilderList() { return getActionsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Action, com.google.cloud.dataplex.v1.Action.Builder, com.google.cloud.dataplex.v1.ActionOrBuilder> getActionsFieldBuilder() { if (actionsBuilder_ == null) { actionsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.dataplex.v1.Action, com.google.cloud.dataplex.v1.Action.Builder, com.google.cloud.dataplex.v1.ActionOrBuilder>( actions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); actions_ = null; } return actionsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. 
* @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListActionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListActionsResponse) private static final com.google.cloud.dataplex.v1.ListActionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListActionsResponse(); } public static com.google.cloud.dataplex.v1.ListActionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListActionsResponse> PARSER = new com.google.protobuf.AbstractParser<ListActionsResponse>() { @java.lang.Override public ListActionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public 
static com.google.protobuf.Parser<ListActionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListActionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.ListActionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
google/s2-geometry-library-java
35,605
library/src/com/google/common/geometry/S2ClosestEdgeQuery.java
/*
 * Copyright 2022 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.common.geometry;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.common.annotations.VisibleForTesting;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.errorprone.annotations.CheckReturnValue;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import org.jspecify.annotations.Nullable;

/**
 * {@link S2ClosestEdgeQuery} is a helper class for searching within an {@link S2ShapeIndex} to find
 * the closest edge(s) to a given target point, edge, s2 cell, or {@link S2ShapeIndex} geometry
 * collection. Closest edges are those with minimum distance from any point on that edge to any
 * point on the target geometry.
 *
 * <p>For example, given a set of polylines, the following code efficiently finds up to the closest
 * 100 edges of those polylines to a query target point, and at most 5km from it. Then, the single
 * polyline edge closest to any edge in a second target ShapeIndex is found.
 *
 * <p>This example code demonstrates the use of the provided convenience methods for working with
 * S2ClosestEdgeQuery using an S1Distance abstract type that is specifically S1ChordAngle. This is
 * recommended for most clients.
 *
 * <pre>{@code
 * class ClosestEdgeDemo {
 *   // Recommended: S2Earth.toAngle(new Length.Kilometers(5));
 *   static final S1Angle MAX_DISTANCE = S1Angle.fromEarthDistance(5000);
 *
 *   public static void example(
 *       List<S2Polyline> polylines, S2ShapeIndex targetIndex, S2Point targetPoint) {
 *     // Add the given polylines to a shape index.
 *     S2ShapeIndex polylinesIndex = new S2ShapeIndex();
 *     for (S2Polyline polyline : polylines) {
 *       polylinesIndex.add(S2LaxPolylineShape.create(polyline));
 *     }
 *
 *     // Create a Builder, modifying the default values with a maximum distance and number of
 *     // results.
 *     S2ClosestEdgeQuery.Builder builder =
 *         S2ClosestEdgeQuery.builder().setInclusiveMaxDistance(MAX_DISTANCE).setMaxResults(100);
 *
 *     // Construct the query.
 *     S2ClosestEdgeQuery.Query query = builder.build(polylinesIndex);
 *
 *     // Create a target that is a single point.
 *     S2ClosestEdgeQuery.PointTarget<S1ChordAngle> pointTarget =
 *         new S2ClosestEdgeQuery.PointTarget<>(targetPoint);
 *
 *     // Find and visit up to 100 polyline edges in polylinesIndex that are the closest to the
 *     // target point, and at most 5 km from it.
 *     query.findClosestEdges(
 *         pointTarget,
 *         (S1ChordAngle distance, int shapeId, int edgeId) -> {
 *           // Do something with each of these closest edge results.
 *           return true;
 *         });
 *
 *     // Create a target that is another shapeIndex.
 *     S2ClosestEdgeQuery.ShapeIndexTarget<S1ChordAngle> shapeIndexTarget =
 *         S2ClosestEdgeQuery.createShapeIndexTarget(targetIndex);
 *     shapeIndexTarget.setIncludeInteriors(true);
 *
 *     // Find the single closest polyline edge to any edge or polygon interior in the
 *     // shapeIndexTarget, if any matches the query options, i.e. at most 5000 meters.
 *     Optional<S2BestEdgesQueryBase.Result<S1ChordAngle>> result =
 *         query.findClosestEdge(shapeIndexTarget);
 *
 *     // If the result.isPresent(), it contains distance(), shapeId(), and edgeId().
 *     if (result.isPresent() && !result.get().isInterior()) {
 *       // Get the actual closest polyline edge endpoints from the Result shape and edge id.
 *       S2Shape.MutableEdge edge = new S2Shape.MutableEdge();
 *       query.getEdge(result.get(), edge);
 *       // Use the edge endpoints.
 *     }
 *   }
 * }
 * }</pre>
 *
 * Query options are immutable and are set on the Builder, which is then used to construct queries
 * as shown in the example above. To find the closest edges to a query edge rather than a point,
 * use:
 *
 * {@snippet :
 * S2ClosestEdgeQuery.EdgeTarget<S1ChordAngle> target = new S2ClosestEdgeQuery.EdgeTarget<>(v0, v1);
 * query.findClosestEdges(target);
 * }
 *
 * <p>Similarly you can find the closest edges to an S2Cell by using a {@link
 * S2ClosestEdgeQuery.CellTarget}, and you can find the closest edges to an arbitrary collection of
 * points, polylines, and polygons by using an {@link S2ClosestEdgeQuery.ShapeIndexTarget}.
 *
 * <p>There are two overloads of the findClosestEdges() method:
 *
 * <ol>
 *   <li>Users may simply get a returned list of Result objects. However, for cases where many calls
 *       to findClosestEdges will be made for a single query, the alternative can be much more
 *       efficient.
 *   <li>Users may instead provide a ResultVisitor that accepts (distance, shape, edge id) tuples.
 *       This avoids boxing values, but it does fully compute the set of results before any are
 *       visited.
 * </ol>
 *
 * The {@link Result} object contains the following accessors:
 *
 * <ul>
 *   <li>{@link Result#distance()} is the distance to the edge.
 *   <li>{@link Result#shapeId()} is the index of the S2Shape containing the edge in the index.
 *   <li>{@link Result#edgeId()} identifies the edge with the given shape.
 *   <li>{@link Result#isInterior()} indicates that the result is an interior point.
 * </ul>
 *
 * <p>{@link S2ClosestEdgeQuery#project(S2Point, Result)} computes the closest point on the Result
 * edge to the given S2Point.
 *
 * <p>You can find either the k closest edges, or all edges within a given radius, or both (i.e.,
 * the k closest edges up to a given maximum radius). E.g. to find all the edges within 5
 * kilometers, call
 *
 * {@snippet :
 * options.setMaxDistance(S2Earth.toAngle(new Length.Kilometers(5)));
 * }
 *
 * <p>By default, *all* edges are returned, so you should always specify either maxResults() or
 * maxDistance() or both. There is also a findClosestEdge() convenience method that returns only the
 * single closest edge.
 *
 * <p>Note that by default, distances are measured to the boundary and interior of polygons. For
 * example, if a point is inside a polygon then its distance is zero. To change this behavior,
 * setIncludeInteriors(false) when building the query options.
 *
 * <p>If you only need to test whether the distance is below a given threshold (e.g., 10 km), you
 * can use the isDistanceLess() method. This is much faster than actually calculating the distance
 * with findClosestEdge(), since the implementation can stop as soon as it can prove that the
 * minimum distance is either above or below the threshold.
 *
 * <p>The implementation is designed to be fast for both simple and complex geometric objects.
 */
@CheckReturnValue
public abstract class S2ClosestEdgeQuery<D extends S1Distance<D>> extends S2BestEdgesQueryBase<D> {

  /** Target is the base interface of all the S2ClosestEdgeQuery-specific targets. */
  public interface Target<D extends S1Distance<D>> extends S2BestDistanceTarget<D> {}

  /** A target for finding the closest edges to a point. */
  public static class PointTarget<D2 extends S1Distance<D2>>
      extends S2BestEdgesQueryBase.PointTarget<D2> implements Target<D2> {
    public PointTarget(S2Point p) {
      super(p);
    }

    /** See {@code S2ClosestEdgeQueryBenchmark.DetermineGoodBruteForceIndexSizes}. */
    @Override
    public int maxBruteForceIndexSize() {
      return 40;
    }

    /** The cap bound of a point target is a zero-radius cap centered on the point. */
    @Override
    public S2Cap getCapBound() {
      return S2Cap.fromAxisChord(point, S1ChordAngle.ZERO);
    }
  }

  /** A target for finding the closest edges to an edge. */
  public static class EdgeTarget<D2 extends S1Distance<D2>>
      extends S2BestEdgesQueryBase.EdgeTarget<D2> implements Target<D2> {
    public EdgeTarget(S2Point a, S2Point b) {
      super(a, b);
    }

    /** See {@code S2ClosestEdgeQueryBenchmark.DetermineGoodBruteForceIndexSizes}. */
    @Override
    public int maxBruteForceIndexSize() {
      return 40;
    }

    /** The cap bound is centered on the edge midpoint, with radius reaching both endpoints. */
    @Override
    public S2Cap getCapBound() {
      double r2 = getHalfEdgeLength2();
      return S2Cap.fromAxisChord(a.add(b).normalize(), S1ChordAngle.fromLength2(r2));
    }
  }

  /** A target for finding the closest edges to an S2Cell. */
  public static class CellTarget<D2 extends S1Distance<D2>>
      extends S2BestEdgesQueryBase.CellTarget<D2> implements Target<D2> {
    // Brute-force threshold; also reused by ShapeIndexTarget.getCapBound() below.
    public static final int MAX_BRUTE_FORCE_INDEX_SIZE = 16;

    public CellTarget(S2Cell c) {
      super(c);
    }

    /** See {@code S2ClosestEdgeQueryBenchmark.DetermineGoodBruteForceIndexSizes}. */
    @Override
    public int maxBruteForceIndexSize() {
      return MAX_BRUTE_FORCE_INDEX_SIZE;
    }

    @Override
    public S2Cap getCapBound() {
      return cell.getCapBound();
    }
  }

  /**
   * A target for finding the closest edges to an S2ShapeIndex. Shape interiors are included by
   * default.
   */
  public static class ShapeIndexTarget<D extends S1Distance<D>>
      extends S2BestEdgesQueryBase.ShapeIndexTarget<D> implements Target<D> {
    /**
     * Clients using S1ChordAngle as their S1Distance type may find it convenient to use
     * {@link S2ClosestEdgeQuery#createShapeIndexTarget(S2ShapeIndex)}.
     *
     * <p>Otherwise, constructing a ShapeIndexTarget for a specific S1Distance type requires
     * providing a S2BestEdgesQueryBase.Builder for closest edges and the templated S1Distance
     * type.
     */
    public ShapeIndexTarget(S2ShapeIndex index, S2BestEdgesQueryBase.Builder<D> queryBuilder) {
      super(index, queryBuilder);
    }

    /** See {@code S2ClosestEdgeQueryBenchmark.DetermineGoodBruteForceIndexSizes}. */
    @Override
    public int maxBruteForceIndexSize() {
      return 40;
    }

    /** Updates 'collector' with the closest distance from this target index to point 'p'. */
    @Override
    @CanIgnoreReturnValue
    public boolean updateBestDistance(S2Point p, DistanceCollector<D> collector) {
      return updateBestDistance(new PointTarget<D>(p), collector);
    }

    /** Updates 'collector' with the closest distance from this target index to edge (v0, v1). */
    @Override
    @CanIgnoreReturnValue
    public boolean updateBestDistance(S2Point v0, S2Point v1, DistanceCollector<D> collector) {
      return updateBestDistance(new EdgeTarget<D>(v0, v1), collector);
    }

    /** Updates 'collector' with the closest distance from this target index to 'cell'. */
    @Override
    @CanIgnoreReturnValue
    public boolean updateBestDistance(S2Cell cell, DistanceCollector<D> collector) {
      return updateBestDistance(new CellTarget<D>(cell), collector);
    }

    @Override
    public S2Cap getCapBound() {
      // TODO(torrey): This is only called once, from S2BestEdgesQueryBase.initQueue. It might be
      // better to rework that algorithm to use a covering of the target directly, rather than
      // getting a cap, enlarging it, and covering that, particularly here where the cap is over
      // a covering.

      // If the index hasn't been built and the number of edges is sufficiently small, we avoid
      // building the index just to compute the cap bound here. If the index has more than
      // CellTarget.MAX_BRUTE_FORCE_INDEX_SIZE edges, it will be built later anyway.
      int maxSize = CellTarget.MAX_BRUTE_FORCE_INDEX_SIZE;
      if (!index.isFresh() && S2ShapeUtil.countEdgesUpTo(index, maxSize) < maxSize) {
        // Small, unbuilt index: accumulate a cap directly over the edge endpoints.
        S2Cap.Builder builder = new S2Cap.Builder();
        S2Shape.MutableEdge e = new S2Shape.MutableEdge();
        for (S2Shape shape : index.getShapes()) {
          for (int i = 0; i < shape.numEdges(); i++) {
            shape.getEdge(i, e);
            builder.add(e);
          }
        }
        return builder.build();
      }
      return new S2ShapeIndexRegion(index).getCapBound();
    }
  }

  /** The constructor is internal, as clients should use the Builder to construct queries. */
  S2ClosestEdgeQuery(Options<D> options) {
    super(options);
  }

  /**
   * Returns the closest (shape, edge, distance) Results to the given target that satisfy the
   * current options. The returned list is sorted by increasing edge distance from the target. This
   * method may be called multiple times, with the same or different targets and options. Note that
   * the findClosestEdges() methods below that take a visitor are more efficient, particularly if
   * many calls are made.
   *
   * <p>See {@link #findClosestEdges(S2BestDistanceTarget, ShapeFilter, ResultVisitor)} for more
   * details.
   */
  public List<Result<D>> findClosestEdges(S2BestDistanceTarget<D> target) {
    return findBestEdges(target);
  }

  /**
   * Returns the closest (shape, edge, distance) Results to the given target that satisfy the
   * current options, and with shapes that pass the given ShapeFilter. The returned list is sorted
   * by increasing distance from the target. This method may be called multiple times, with the same
   * or different targets, shapeFilter, and options. Note that the findClosestEdges() methods below
   * that take a visitor are more efficient, particularly if many calls are made.
   *
   * <p>See {@link #findClosestEdges(S2BestDistanceTarget, ShapeFilter, ResultVisitor)} for more
   * details.
   */
  public List<Result<D>> findClosestEdges(
      S2BestDistanceTarget<D> target, @Nullable ShapeFilter shapeFilter) {
    return findBestEdges(target, shapeFilter);
  }

  /**
   * Visits the closest (shape, edge, distance) Results to the given target that satisfy the current
   * options. This method may be called multiple times, with the same or different targets
   * and options. Results are visited in order of increasing distance from the target.
   *
   * <p>See {@link #findClosestEdges(S2BestDistanceTarget, ShapeFilter, ResultVisitor)} for more
   * details.
   */
  public void findClosestEdges(S2BestDistanceTarget<D> target, ResultVisitor<D> visitor) {
    findBestEdges(target, visitor);
  }

  /**
   * Visits the closest (shape, edge, distance) Results to the given target that satisfy the current
   * options, and with shapes that pass the given ShapeFilter. This method may be called multiple
   * times, with the same or different targets, options, and ShapeFilters. Results are visited in
   * order of increasing distance from the target.
   *
   * <p>Note that if {@code options().includeInteriors()} is true, the results may include some
   * entries with edgeId == -1. This indicates that the target is contained by or intersects the
   * indexed polygon with the returned shapeId. Such results may also be identified by calling
   * {@link Result#isInterior()}.
   *
   * <p>The ShapeFilter may return different values for the same shapeId as it is repeatedly called.
   * For instance, the filter could accept a given shapeId the first time it is seen, and false
   * afterwards. If you only need to find the closest shapes to a target, this technique can speed
   * up the query significantly by returning fewer edges, especially for large indexes.
   *
   * <p>However, the ShapeFilter isn't called for every edge: a single passed filter test may
   * produce Results for many or even all the edges of that shape. Also, the Results produced would
   * not necessarily be the closest possible Results, as edges are not discovered in distance order
   * and the ShapeFilter is not called for edges in distance order. Finally, note that filtering
   * shapes and visiting results are not interleaved in the current implementation. First, edges and
   * shapes are discovered and filtered, and Results collected. Then the remaining Results are
   * visited in distance order.
   */
  public void findClosestEdges(
      S2BestDistanceTarget<D> target, @Nullable ShapeFilter shapeFilter, ResultVisitor<D> visitor) {
    findBestEdges(target, shapeFilter, visitor);
  }

  /**
   * Returns the closest (shape, edge, distance) Result to the target, if one satisfies the current
   * options. Otherwise, the returned Optional will not be present.
   *
   * <p>See {@link #findClosestEdges(S2BestDistanceTarget, ShapeFilter, ResultVisitor)} for more
   * details.
   */
  public Optional<Result<D>> findClosestEdge(S2BestDistanceTarget<D> target) {
    return findBestEdge(target);
  }

  /**
   * Returns the closest (shape, edge, distance) Result to the target, if one satisfies the current
   * options with a shape id that passes the given ShapeFilter. Otherwise, the returned Optional
   * will not be present.
   *
   * <p>See {@link #findClosestEdges(S2BestDistanceTarget, ShapeFilter, ResultVisitor)} for more
   * details.
   */
  public Optional<Result<D>> findClosestEdge(
      S2BestDistanceTarget<D> target, @Nullable ShapeFilter shapeFilter) {
    return findBestEdge(target, shapeFilter);
  }

  /**
   * Returns the minimum distance to the target. If the index or target is empty, or no edges
   * satisfy the current Options, returns a distance greater than any valid distance.
   *
   * <p>Use isDistanceLess() if you only want to compare the distance against a threshold value,
   * since it is often much faster.
   */
  public D getDistance(S2BestDistanceTarget<D> target) {
    return getDistance(target, null);
  }

  /**
   * Returns the minimum distance to the target from edges in the index satisfying the current
   * Options, and from shapes accepted by the given ShapeFilter. If the index or target is empty, or
   * no edges satisfy the current Options or ShapeFilter, returns a distance greater than any valid
   * distance.
   *
   * <p>Use isDistanceLess() if you only want to compare the distance against a threshold value,
   * since it is often much faster.
   */
  public D getDistance(S2BestDistanceTarget<D> target, @Nullable ShapeFilter shapeFilter) {
    Optional<Result<D>> result = findBestEdge(target, shapeFilter);
    // beyondWorstDistance() is the "no result" sentinel, greater than any valid distance.
    return result.isPresent() ? result.get().distance() : beyondWorstDistance();
  }

  /**
   * Returns true if the distance to "target" from any edge in the index is less than "limit".
   *
   * <p>This method is usually much faster than getDistance(), since it is less work to determine
   * whether the minimum distance is above or below a threshold than it is to calculate the actual
   * minimum distance.
   */
  public boolean isDistanceLess(S2BestDistanceTarget<D> target, D limit) {
    return isDistanceLess(target, limit, null);
  }

  /**
   * Returns true if the distance to "target" from shapes accepted by the given ShapeFilter is less
   * than "limit".
   *
   * <p>This method is usually much faster than getDistance(), since it is less work to determine
   * whether the minimum distance is above or below a threshold than it is to calculate the actual
   * minimum distance.
   */
  public boolean isDistanceLess(S2BestDistanceTarget<D> target, D limit, ShapeFilter shapeFilter) {
    // One result suffices, and maxError = worstDistance() lets the search stop as soon as ANY
    // edge is proven to be within the limit, rather than finding the truly closest edge.
    maxResults = 1;
    maxError = worstDistance();
    distanceLimit = limit;
    this.shapeFilter = shapeFilter;
    // Determine if any edge is found within the limit.
    findBestEdgesInternal(target);
    // Clear the mutable query state before returning.
    this.shapeFilter = null;
    boolean result = bestResult != null;
    bestResult = null;
    return result;
  }

  /** Returns the point on the given Result edge that is closest to the given targetPoint. */
  public S2Point project(S2Point targetPoint, Result<D> result) {
    // An interior result (edgeId == -1) means the target point is inside a polygon, so the
    // point itself is the closest point.
    if (result.edgeId() < 0) {
      return targetPoint;
    }
    S2Shape.MutableEdge resultEdge = new S2Shape.MutableEdge();
    index.getShapes().get(result.shapeId()).getEdge(result.edgeId(), resultEdge);
    return S2EdgeUtil.project(targetPoint, resultEdge.getStart(), resultEdge.getEnd());
  }

  /**
   * Visits shapes in the index with interiors containing a point of a connected component of the
   * given target. Returns true if all such shapes were visited, or false if the {@link
   * Options#maxResults()} limit was reached. Note that the visited shapes may either intersect or
   * completely contain a connected component of the target.
   *
   * <p>This is a low-level method, visible for testing. Clients should use {@link
   * #findClosestEdges} with a maxDistance of S1ChordAngle.ZERO and check {@link
   * Result#isInterior()}.
   */
  @VisibleForTesting
  @CanIgnoreReturnValue
  public boolean visitContainingShapes(
      Target<D> target, S2ContainsPointQuery.ShapeVisitor visitor) {
    return visitBestDistanceContainingShapes(target, visitor);
  }

  @Override
  @CanIgnoreReturnValue
  protected boolean visitBestDistanceContainingShapes(
      S2BestDistanceTarget<D> target, S2ContainsPointQuery.ShapeVisitor visitor) {
    S2ContainsPointQuery containsPointQuery = new S2ContainsPointQuery(index);
    return target.visitConnectedComponentPoints(
        targetPoint -> containsPointQuery.visitContainingShapes(targetPoint, visitor));
  }

  /**
   * Subclasses {@code S2BestEdgesQueryBase.Builder<S1ChordAngle>} for finding closest edges using
   * S1ChordAngle as the distance type.
   *
   * <p>Provides the additional convenience methods setMaxDistance(S1Angle),
   * setInclusiveMaxDistance(S1ChordAngle), and setConservativeMaxDistance(S1ChordAngle).
   */
  public static class Builder extends S2BestEdgesQueryBase.Builder<S1ChordAngle> {
    /**
     * Constructs a new Builder with inclusive default values: unlimited results, no maximum
     * distance, maxError of zero, includeInteriors true, brute force false.
     */
    public Builder() {
      super(S1ChordAngle.INFINITY, S1ChordAngle.ZERO);
    }

    /** Constructs a new Builder with values copied from the given Options. */
    public Builder(S2BestEdgesQueryBase.Options<S1ChordAngle> options) {
      super(options);
    }

    /**
     * Builds a new Query, which is a {@code S2ClosestEdgeQuery<S1ChordAngle>} using the current
     * Options of this Builder.
     */
    @Override
    public Query build() {
      return new Query(new Options<>(this));
    }

    /**
     * Builds a new Query, which is a {@code S2ClosestEdgeQuery<S1ChordAngle>} for the provided
     * S2ShapeIndex using the current Options of this Builder.
     */
    @Override
    public Query build(S2ShapeIndex index) {
      return new Query(new Options<>(this), index);
    }

    /** Specifies the maximum number of results to be returned. */
    @CanIgnoreReturnValue
    @Override
    public Builder setMaxResults(int maxResults) {
      this.maxResults = maxResults;
      return this;
    }

    /**
     * Specifies that only edges whose distance to the target is less than maxDistance should be
     * returned.
     *
     * <p>Note that edges whose distance is exactly equal to "maxDistance" are not returned. In
     * most cases this doesn't matter, since distances are not computed exactly in the first
     * place, but if such edges are needed then you can use {@link
     * Builder#setInclusiveMaxDistance(S1ChordAngle)}.
     */
    @CanIgnoreReturnValue
    public Builder setMaxDistance(S1ChordAngle maxDistance) {
      this.distanceLimit = maxDistance;
      return this;
    }

    /**
     * Like {@link Builder#setMaxDistance(S1ChordAngle)}, but maxDistance is provided as an
     * S1Angle.
     */
    @CanIgnoreReturnValue
    public Builder setMaxDistance(S1Angle maxDistance) {
      this.distanceLimit = S1ChordAngle.fromS1Angle(maxDistance);
      return this;
    }

    /**
     * Specifies that only edges whose distance to the target is less than or equal to maxDistance
     * should be returned.
     */
    @CanIgnoreReturnValue
    public Builder setInclusiveMaxDistance(S1ChordAngle maxDistance) {
      // successor() makes the strict "less than" limit behave as "less than or equal".
      this.distanceLimit = maxDistance.successor();
      return this;
    }

    /** Like {@link #setInclusiveMaxDistance(S1ChordAngle)} but takes an S1Angle for convenience. */
    @CanIgnoreReturnValue
    public Builder setInclusiveMaxDistance(S1Angle maxDistance) {
      setInclusiveMaxDistance(S1ChordAngle.fromS1Angle(maxDistance));
      return this;
    }

    /**
     * Like setInclusiveMaxDistance(), except that "maxDistance" is also increased by the maximum
     * error in distance calculations. This ensures that all edges whose true distance is less than
     * or equal to "maxDistance" will be returned (along with some edges whose true distance is
     * slightly greater).
     *
     * <p>Algorithms that need to do exact distance comparisons can use this option to find a set of
     * candidate edges that can then be filtered further (e.g., using {@code
     * S2Predicates.compareDistance}).
     */
    @CanIgnoreReturnValue
    public Builder setConservativeMaxDistance(S1ChordAngle maxDistance) {
      this.distanceLimit =
          maxDistance.plusError(S2EdgeUtil.getUpdateMinDistanceMaxError(maxDistance)).successor();
      return this;
    }

    /**
     * Like {@link #setConservativeMaxDistance(S1ChordAngle)} but takes an S1Angle for convenience.
     */
    @CanIgnoreReturnValue
    public Builder setConservativeMaxDistance(S1Angle maxDistance) {
      setConservativeMaxDistance(S1ChordAngle.fromS1Angle(maxDistance));
      return this;
    }

    /** All results will have distance less than maxDistance() from the target. */
    public S1ChordAngle maxDistance() {
      return distanceLimit;
    }

    /**
     * A non-zero maxError specifies that edges with distance up to maxError further than the true
     * closest edges may be substituted in the result set, as long as such edges satisfy all the
     * the remaining search criteria. This option only has an effect if {@link #maxResults()} is
     * also specified; otherwise all edges that satisfy the maxDistance() will be returned.
     *
     * <p>Note that this does not affect how the distance between edges is computed; it simply
     * gives the algorithm permission to stop the search early, as soon as the best possible
     * improvement drops below {@link #maxError()}.
     *
     * <p>This can be used to implement distance predicates efficiently. For example, to determine
     * whether the minimum distance is less than 'threshold', set {@code maxResults == 1} and {@code
     * maxDistance() == maxError() == threshold}. This causes the algorithm to terminate as soon as
     * it finds any edge whose distance is less than 'threshold', rather than continuing to search
     * for an edge that is even closer.
     */
    @CanIgnoreReturnValue
    @Override
    public Builder setMaxError(S1ChordAngle maxError) {
      this.maxError = maxError;
      return this;
    }

    /** Like {@link #setMaxError(S1ChordAngle)}, but maxError is provided as an S1Angle. */
    @CanIgnoreReturnValue
    public Builder setMaxError(S1Angle maxError) {
      this.maxError = S1ChordAngle.fromS1Angle(maxError);
      return this;
    }

    /**
     * True if polygon interiors in the queried index are considered when computing distance.
     *
     * <p>When true, polygons that contain some geometry have a distance of zero to it. For targets
     * consisting of multiple connected components, this occurs if any component is contained.
     * This is indicated in the results by returning a (shape, edgeId) pair with {@code edgeId ==
     * -1}, i.e. this value denotes the polygons's interior.
     *
     * <p>Note that this does not control if the interiors of _target_ polygons should be
     * included; that is a separate option on targets.
     *
     * <p>Note that for efficiency, any polygon that intersects the target may or may not have an
     * (edgeId == -1) result. Such results are optional because in that case the distance from the
     * polygon is already zero.
     */
    @CanIgnoreReturnValue
    @Override
    public Builder setIncludeInteriors(boolean includeInteriors) {
      this.includeInteriors = includeInteriors;
      return this;
    }

    /**
     * If true, distances should be computed by examining every edge rather than using the
     * S2ShapeIndex. This is useful for testing, benchmarking, and debugging.
     */
    @CanIgnoreReturnValue
    @Override
    public Builder setUseBruteForce(boolean useBruteForce) {
      this.useBruteForce = useBruteForce;
      return this;
    }
  }

  /**
   * Subclasses {@code S2ClosestEdgeQuery<S1ChordAngle>} for finding closest edges using
   * S1ChordAngle as the distance type.
   *
   * <p>Provides the additional convenience methods isDistanceLessOrEqual() and
   * isConservativeDistanceLessOrEqual().
   */
  public static class Query extends S2ClosestEdgeQuery<S1ChordAngle> {
    /** Constructor for internal use. Clients should use the Builder to create queries. */
    Query(Options<S1ChordAngle> options) {
      super(options);
    }

    /** Constructor for internal use. Clients should use the Builder to create queries. */
    Query(Options<S1ChordAngle> options, S2ShapeIndex index) {
      super(options);
      init(index);
    }

    /** Convenience method to get a Builder from this Query's current options. */
    public S2ClosestEdgeQuery.Builder toBuilder() {
      return new S2ClosestEdgeQuery.Builder(options());
    }

    @Override
    protected DistanceCollector<S1ChordAngle> newDistanceCollector() {
      return S1ChordAngle.minCollector();
    }

    @Override
    protected boolean atBestLimit(DistanceCollector<S1ChordAngle> distanceCollector) {
      // For closest-edge queries, distance zero is the best possible, so the search can stop.
      return distanceCollector.distance().getLength2() <= 0;
    }

    @Override
    protected Comparator<S1ChordAngle> distanceComparator() {
      // TODO(torrey): When Android supports Comparator.naturalOrder(), just return that.
      return (S1ChordAngle a, S1ChordAngle b) -> {
        return a.compareTo(b);
      };
    }

    @Override
    protected S1ChordAngle zeroDistance() {
      return S1ChordAngle.ZERO;
    }

    @Override
    protected S1ChordAngle bestDistance() {
      return S1ChordAngle.ZERO;
    }

    @Override
    protected S1ChordAngle worstDistance() {
      return S1ChordAngle.STRAIGHT;
    }

    @Override
    protected S1ChordAngle beyondWorstDistance() {
      return S1ChordAngle.INFINITY;
    }

    /**
     * Adjust the given 'value' towards zero by the current maximum error.
     *
     * <p>For {@link #isDistanceLess(S2BestDistanceTarget, S1Distance)}, maxError is STRAIGHT, so
     * the errorBoundedDistance will be zero for any given 'value'.
     */
    @Override
    protected S1ChordAngle errorBoundedDistance(S1ChordAngle value) {
      return S1ChordAngle.sub(value, maxError);
    }

    /**
     * For closest edges, the search cap radius is the sum of the target cap radius bounding the
     * area with distance zero from the target, the maximum distance from the target, and the
     * potential error in computing the max distance from an angle.
     */
    @Override
    protected S1ChordAngle searchCapRadius(S1ChordAngle targetCapRadius, S1ChordAngle maxDistance) {
      checkArgument(!targetCapRadius.isNegative());
      checkArgument(!targetCapRadius.isInfinity());
      checkArgument(!maxDistance.isNegative());
      checkArgument(!maxDistance.isInfinity());
      return S1ChordAngle.add(
          targetCapRadius, maxDistance.plusError(maxDistance.getS1AngleConstructorMaxError()));
    }

    /**
     * Like {@link #isDistanceLess(S2BestDistanceTarget, S1Distance)}, but also returns true if
     * the distance to "target" is exactly equal to "limit".
     */
    public boolean isDistanceLessOrEqual(Target<S1ChordAngle> target, S1ChordAngle limit) {
      return isDistanceLessOrEqual(target, limit, null);
    }

    /**
     * Like {@link #isDistanceLess(S2BestDistanceTarget, S1Distance, ShapeFilter)}, but also returns
     * true if the distance to "target" is exactly equal to "limit".
     */
    public boolean isDistanceLessOrEqual(
        Target<S1ChordAngle> target, S1ChordAngle limit, @Nullable ShapeFilter shapeFilter) {
      // Note that from here on down, the distanceLimit, maxResults, and maxError fields are used,
      // not the same-named Options fields.
      // successor() converts the strict "less than" search into "less than or equal".
      distanceLimit = limit.successor();
      maxResults = 1;
      maxError = worstDistance();
      this.shapeFilter = shapeFilter;
      findBestEdgesInternal(target);
      // Clear the mutable query state before returning.
      this.shapeFilter = null;
      boolean result = bestResult != null;
      bestResult = null;
      return result;
    }

    /**
     * Like {@link #isDistanceLessOrEqual(Target, S1ChordAngle)}, except that "limit" is increased
     * by the maximum error in the distance calculation. This ensures that this function returns
     * true whenever the true, exact distance is less than or equal to "limit".
     *
     * <p>For example, suppose that we want to test whether two geometries might intersect each
     * other after they are snapped together using S2Builder (using the IdentitySnapFunction with a
     * given "snapRadius"). Since S2Builder uses exact distance predicates, we need to measure the
     * distance between the two geometries conservatively. If the distance is definitely greater
     * than "snapRadius", then the geometries are guaranteed to not intersect after snapping.
     */
    public boolean isConservativeDistanceLessOrEqual(
        Target<S1ChordAngle> target, S1ChordAngle limit) {
      return isConservativeDistanceLessOrEqual(target, limit, null);
    }

    /**
     * Like {@link #isDistanceLessOrEqual(Target, S1ChordAngle, ShapeFilter)}, except that "limit"
     * is increased by the maximum error in the distance calculation. This ensures that this
     * function returns true whenever the true, exact distance is less than or equal to "limit".
     *
     * <p>For example, suppose that we want to test whether two geometries might intersect each
     * other after they are snapped together using S2Builder (using the IdentitySnapFunction with a
     * given "snapRadius"). Since S2Builder uses exact distance predicates, we need to measure the
     * distance between the two geometries conservatively. If the distance is definitely greater
     * than "snapRadius", then the geometries are guaranteed to not intersect after snapping.
     */
    public boolean isConservativeDistanceLessOrEqual(
        Target<S1ChordAngle> target, S1ChordAngle limit, @Nullable ShapeFilter shapeFilter) {
      // Note that from here on down, the distanceLimit, maxResults, and maxError fields are used,
      // not the same-named Options fields.
      // Inflate the limit by the worst-case computation error so no true match is missed.
      distanceLimit = limit.plusError(S2EdgeUtil.getUpdateMinDistanceMaxError(limit)).successor();
      maxResults = 1;
      maxError = worstDistance();
      this.shapeFilter = shapeFilter;
      findBestEdgesInternal(target);
      // Clear the mutable query state before returning.
      this.shapeFilter = null;
      boolean result = bestResult != null;
      bestResult = null;
      return result;
    }
  }

  /**
   * Convenience method to create a new Builder, which extends {@link S2ClosestEdgeQuery.Builder}
   * using S1ChordAngle as the distance type.
   *
   * <p>The initial options are the most permissive default values. There is no limit on the maximum
   * distance or number of results returned. The maxError option is zero. The interiors of polygons
   * in the index are included, i.e. a target contained by an index polygon is at distance zero.
   */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * Convenience method to create a ShapeIndexTarget using S1ChordAngle as the distance type. Shape
   * interiors are included, by default.
   *
   * <p>This is used for finding closest edges from a query ShapeIndex to a second ShapeIndex
   * wrapped by the ShapeIndexTarget.
   */
  public static ShapeIndexTarget<S1ChordAngle> createShapeIndexTarget(S2ShapeIndex index) {
    return new ShapeIndexTarget<>(index, new Builder());
  }
}
googleapis/google-cloud-java
35,296
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/UpdateAspectTypeRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/catalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * Update AspectType Request * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateAspectTypeRequest} */ public final class UpdateAspectTypeRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateAspectTypeRequest) UpdateAspectTypeRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateAspectTypeRequest.newBuilder() to construct. 
private UpdateAspectTypeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateAspectTypeRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateAspectTypeRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateAspectTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateAspectTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.class, com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.Builder.class); } private int bitField0_; public static final int ASPECT_TYPE_FIELD_NUMBER = 1; private com.google.cloud.dataplex.v1.AspectType aspectType_; /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the aspectType field is set. */ @java.lang.Override public boolean hasAspectType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The aspectType. */ @java.lang.Override public com.google.cloud.dataplex.v1.AspectType getAspectType() { return aspectType_ == null ? com.google.cloud.dataplex.v1.AspectType.getDefaultInstance() : aspectType_; } /** * * * <pre> * Required. 
AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataplex.v1.AspectTypeOrBuilder getAspectTypeOrBuilder() { return aspectType_ == null ? com.google.cloud.dataplex.v1.AspectType.getDefaultInstance() : aspectType_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getAspectType()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAspectType()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateAspectTypeRequest)) { return super.equals(obj); } com.google.cloud.dataplex.v1.UpdateAspectTypeRequest other = (com.google.cloud.dataplex.v1.UpdateAspectTypeRequest) obj; if (hasAspectType() != other.hasAspectType()) return false; if (hasAspectType()) { if (!getAspectType().equals(other.getAspectType())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; 
if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAspectType()) { hash = (37 * hash) + ASPECT_TYPE_FIELD_NUMBER; hash = (53 * hash) + getAspectType().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder 
newBuilder(com.google.cloud.dataplex.v1.UpdateAspectTypeRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update AspectType Request * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateAspectTypeRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateAspectTypeRequest) com.google.cloud.dataplex.v1.UpdateAspectTypeRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateAspectTypeRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateAspectTypeRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.class, com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.Builder.class); } // Construct using com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getAspectTypeFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; aspectType_ = null; if (aspectTypeBuilder_ != null) { aspectTypeBuilder_.dispose(); aspectTypeBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.CatalogProto .internal_static_google_cloud_dataplex_v1_UpdateAspectTypeRequest_descriptor; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateAspectTypeRequest getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateAspectTypeRequest build() { com.google.cloud.dataplex.v1.UpdateAspectTypeRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateAspectTypeRequest buildPartial() { com.google.cloud.dataplex.v1.UpdateAspectTypeRequest result = new com.google.cloud.dataplex.v1.UpdateAspectTypeRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataplex.v1.UpdateAspectTypeRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.aspectType_ = aspectTypeBuilder_ == null ? aspectType_ : aspectTypeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.UpdateAspectTypeRequest) { return mergeFrom((com.google.cloud.dataplex.v1.UpdateAspectTypeRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateAspectTypeRequest other) { if (other == com.google.cloud.dataplex.v1.UpdateAspectTypeRequest.getDefaultInstance()) return this; if (other.hasAspectType()) { mergeAspectType(other.getAspectType()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getAspectTypeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dataplex.v1.AspectType aspectType_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.AspectType, com.google.cloud.dataplex.v1.AspectType.Builder, com.google.cloud.dataplex.v1.AspectTypeOrBuilder> aspectTypeBuilder_; /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the aspectType field is set. */ public boolean hasAspectType() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The aspectType. */ public com.google.cloud.dataplex.v1.AspectType getAspectType() { if (aspectTypeBuilder_ == null) { return aspectType_ == null ? 
com.google.cloud.dataplex.v1.AspectType.getDefaultInstance() : aspectType_; } else { return aspectTypeBuilder_.getMessage(); } } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAspectType(com.google.cloud.dataplex.v1.AspectType value) { if (aspectTypeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } aspectType_ = value; } else { aspectTypeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setAspectType(com.google.cloud.dataplex.v1.AspectType.Builder builderForValue) { if (aspectTypeBuilder_ == null) { aspectType_ = builderForValue.build(); } else { aspectTypeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeAspectType(com.google.cloud.dataplex.v1.AspectType value) { if (aspectTypeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && aspectType_ != null && aspectType_ != com.google.cloud.dataplex.v1.AspectType.getDefaultInstance()) { getAspectTypeBuilder().mergeFrom(value); } else { aspectType_ = value; } } else { aspectTypeBuilder_.mergeFrom(value); } if (aspectType_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. 
AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearAspectType() { bitField0_ = (bitField0_ & ~0x00000001); aspectType_ = null; if (aspectTypeBuilder_ != null) { aspectTypeBuilder_.dispose(); aspectTypeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.AspectType.Builder getAspectTypeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getAspectTypeFieldBuilder().getBuilder(); } /** * * * <pre> * Required. AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.AspectTypeOrBuilder getAspectTypeOrBuilder() { if (aspectTypeBuilder_ != null) { return aspectTypeBuilder_.getMessageOrBuilder(); } else { return aspectType_ == null ? com.google.cloud.dataplex.v1.AspectType.getDefaultInstance() : aspectType_; } } /** * * * <pre> * Required. 
AspectType Resource * </pre> * * <code> * .google.cloud.dataplex.v1.AspectType aspect_type = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.AspectType, com.google.cloud.dataplex.v1.AspectType.Builder, com.google.cloud.dataplex.v1.AspectTypeOrBuilder> getAspectTypeFieldBuilder() { if (aspectTypeBuilder_ == null) { aspectTypeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.AspectType, com.google.cloud.dataplex.v1.AspectType.Builder, com.google.cloud.dataplex.v1.AspectTypeOrBuilder>( getAspectType(), getParentForChildren(), isClean()); aspectType_ = null; } return aspectTypeBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private boolean validateOnly_; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. 
* @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is false. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateAspectTypeRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateAspectTypeRequest) private static final com.google.cloud.dataplex.v1.UpdateAspectTypeRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateAspectTypeRequest(); } public static com.google.cloud.dataplex.v1.UpdateAspectTypeRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateAspectTypeRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateAspectTypeRequest>() { @java.lang.Override public UpdateAspectTypeRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException 
e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateAspectTypeRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateAspectTypeRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateAspectTypeRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/solr
34,458
solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.client.solrj.io.stream; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import java.io.IOException; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.List; import java.util.Map; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4.SuppressPointFields; import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.comp.ComparatorOrder; import org.apache.solr.client.solrj.io.comp.FieldComparator; import org.apache.solr.client.solrj.io.stream.expr.Expressible; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.stream.metrics.CountMetric; import org.apache.solr.client.solrj.io.stream.metrics.MaxMetric; import org.apache.solr.client.solrj.io.stream.metrics.MeanMetric; import org.apache.solr.client.solrj.io.stream.metrics.MinMetric; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import 
org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.AbstractDistribZkTestBase; import org.apache.solr.cloud.SolrCloudTestCase; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; /** */ @SuppressPointFields(bugUrl = "https://issues.apache.org/jira/browse/SOLR-10960") @LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40", "Lucene41", "Lucene42", "Lucene45"}) @ThreadLeakLingering(linger = 0) public class JDBCStreamTest extends SolrCloudTestCase { private static final String COLLECTIONORALIAS = "jdbc"; private static final int TIMEOUT = 30; private static final String id = "id"; @BeforeClass public static void setupCluster() throws Exception { configureCluster(4) .addConfig( "conf", getFile("solrj") .resolve("solr") .resolve("configsets") .resolve("streaming") .resolve("conf")) .configure(); boolean useAlias = random().nextBoolean(); String collection; if (useAlias) { collection = COLLECTIONORALIAS + "_collection"; } else { collection = COLLECTIONORALIAS; } CollectionAdminRequest.createCollection(collection, "conf", 2, 1) .process(cluster.getSolrClient()); AbstractDistribZkTestBase.waitForRecoveriesToFinish( collection, cluster.getZkStateReader(), false, true, TIMEOUT); if (useAlias) { CollectionAdminRequest.createAlias(COLLECTIONORALIAS, collection) .process(cluster.getSolrClient()); } } @BeforeClass public static void setupDatabase() throws Exception { // Initialize Database // Ok, so.....hsqldb is doing something totally weird, so I thought I'd take a moment to explain // it. // According to http://www.hsqldb.org/doc/1.8/guide/guide.html#N101EF, section "Components of // SQL Expressions", clause "name", "When an SQL statement is issued, any lowercase characters // in unquoted identifiers are converted to uppercase." :( Like seriously.... // So, for this reason and to simplify writing these tests I've decided that in all statements // all table and column names will be in UPPERCASE. 
This is to ensure things look and behave // consistently. Note that this is not a requirement of the JDBCStream and is only a carryover // from the driver we are testing with. Class.forName("org.hsqldb.jdbcDriver").getConstructor().newInstance(); Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement(); statement.executeUpdate( "create table COUNTRIES(CODE varchar(3) not null primary key, COUNTRY_NAME varchar(50), DELETED char(1) default 'N')"); statement.executeUpdate( "create table PEOPLE(ID int not null primary key, NAME varchar(50), COUNTRY_CODE char(2), DELETED char(1) default 'N')"); statement.executeUpdate( "create table PEOPLE_SPORTS(ID int not null primary key, PERSON_ID int, SPORT_NAME varchar(50), DELETED char(1) default 'N')"); statement.executeUpdate( "create table UNSUPPORTED_COLUMNS(ID int not null primary key, UNSP binary)"); statement.executeUpdate("create table DUAL(ID int not null primary key)"); statement.executeUpdate("insert into DUAL values(1)"); } @AfterClass public static void teardownDatabase() throws SQLException { Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement(); statement.executeUpdate("shutdown"); } @Before public void cleanIndex() throws Exception { new UpdateRequest().deleteByQuery("*:*").commit(cluster.getSolrClient(), COLLECTIONORALIAS); } @Before public void cleanDatabase() throws Exception { // Clear database try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate("delete from COUNTRIES WHERE 1=1"); statement.executeUpdate("delete from PEOPLE WHERE 1=1"); statement.executeUpdate("delete from PEOPLE_SPORTS WHERE 1=1"); statement.executeUpdate("delete from UNSUPPORTED_COLUMNS WHERE 1=1"); } } @Test public void testJDBCSelect() throws Exception { // Load Database Data try (Connection 
connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); } TupleStream stream; List<Tuple> tuples; // Simple 1 stream = new JDBCStream( "jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING)); tuples = getTuples(stream); assertEquals(4, tuples.size()); assertOrderOf(tuples, "CODE", "NL", "NO", "NP", "US"); assertOrderOf(tuples, "COUNTRY_NAME", "Netherlands", "Norway", "Nepal", "United States"); // Simple 2 stream = new JDBCStream( "jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by COUNTRY_NAME", new FieldComparator("COUNTRY_NAME", ComparatorOrder.ASCENDING)); tuples = getTuples(stream); assertEquals(4, tuples.size()); assertOrderOf(tuples, "CODE", "NP", "NL", "NO", "US"); assertOrderOf(tuples, "COUNTRY_NAME", "Nepal", "Netherlands", "Norway", "United States"); // Additional Types String query = "select 1 as ID1, {ts '2017-02-18 12:34:56.789'} as TS1, {t '01:02:03'} as T1, " + "{d '1593-03-14'} as D1, cast(12.34 AS DECIMAL(4,2)) as DEC4_2, " + "cast(1234 AS DECIMAL(4,0)) as DEC4_0, cast('big stuff' as CLOB(100)) as CLOB1 " + "from DUAL order by ID1"; stream = new JDBCStream( "jdbc:hsqldb:mem:.", query, new FieldComparator("ID1", ComparatorOrder.ASCENDING)); tuples = getTuples(stream); assertEquals(1, tuples.size()); Tuple t; try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { ResultSet rs = statement.executeQuery(query); rs.next(); t = 
tuples.iterator().next(); assertString(t, "CLOB1", rs.getString("CLOB1")); assertString(t, "TS1", rs.getTimestamp("TS1").toInstant().toString()); assertString(t, "T1", rs.getTime("T1").toString()); assertString(t, "D1", rs.getDate("D1").toString()); assertDouble(t, "DEC4_2", rs.getDouble("DEC4_2")); assertLong(t, "DEC4_0", rs.getLong("DEC4_0")); } } @Test public void testJDBCJoin() throws Exception { // Load Database Data try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','NI')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NG')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NF')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NE')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','NC')"); statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NZ')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','NR')"); } TupleStream stream; List<Tuple> tuples; // Simple 1 stream = 
new JDBCStream( "jdbc:hsqldb:mem:.", "select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE where COUNTRIES.CODE = 'NL' order by PEOPLE.ID", new FieldComparator("ID", ComparatorOrder.ASCENDING)); tuples = getTuples(stream); assertEquals(3, tuples.size()); assertOrderOf(tuples, "ID", 11, 17, 19); assertOrderOf(tuples, "NAME", "Emma", "Mia", "Olivia"); } @Test public void testJDBCSolrMerge() throws Exception { // Load Database Data try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('AL', 'Algeria')"); } StreamContext streamContext = new StreamContext(); SolrClientCache solrClientCache = new SolrClientCache(); streamContext.setSolrClientCache(solrClientCache); // Load Solr new UpdateRequest() .add(id, "0", "code_s", "GB", "name_s", "Great Britain") .add(id, "1", "code_s", "CA", "name_s", "Canada") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); StreamFactory factory = new StreamFactory() .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()) .withFunctionName("search", CloudSolrStream.class); List<Tuple> tuples; try { // Simple 1 TupleStream jdbcStream = new JDBCStream( "jdbc:hsqldb:mem:.", "select CODE,COUNTRY_NAME from COUNTRIES order by CODE", new FieldComparator("CODE", ComparatorOrder.ASCENDING)); TupleStream selectStream = new SelectStream( jdbcStream, Map.of( "CODE", "code_s", "COUNTRY_NAME", "name_s")); TupleStream 
searchStream = factory.constructStream( "search(" + COLLECTIONORALIAS + ", fl=\"code_s,name_s\",q=\"*:*\",sort=\"code_s asc\")"); TupleStream mergeStream = new MergeStream( new FieldComparator("code_s", ComparatorOrder.ASCENDING), new TupleStream[] {selectStream, searchStream}); mergeStream.setStreamContext(streamContext); tuples = getTuples(mergeStream); assertEquals(7, tuples.size()); assertOrderOf(tuples, "code_s", "AL", "CA", "GB", "NL", "NO", "NP", "US"); assertOrderOf( tuples, "name_s", "Algeria", "Canada", "Great Britain", "Netherlands", "Norway", "Nepal", "United States"); } finally { solrClientCache.close(); } } @Test public void testJDBCSolrInnerJoinExpression() throws Exception { StreamFactory factory = new StreamFactory() .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()) .withFunctionName("search", CloudSolrStream.class) .withFunctionName("select", SelectStream.class) .withFunctionName("innerJoin", InnerJoinStream.class) .withFunctionName("jdbc", JDBCStream.class); // Load Database Data try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')"); statement.executeUpdate( "insert into 
PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')"); statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')"); } // Load solr data new UpdateRequest() .add(id, "1", "rating_f", "3.5", "personId_i", "11") .add(id, "2", "rating_f", "5", "personId_i", "12") .add(id, "3", "rating_f", "2.2", "personId_i", "13") .add(id, "4", "rating_f", "4.3", "personId_i", "14") .add(id, "5", "rating_f", "3.5", "personId_i", "15") .add(id, "6", "rating_f", "3", "personId_i", "16") .add(id, "7", "rating_f", "3", "personId_i", "17") .add(id, "8", "rating_f", "4", "personId_i", "18") .add(id, "9", "rating_f", "4.1", "personId_i", "19") .add(id, "10", "rating_f", "4.8", "personId_i", "20") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); String expression; TupleStream stream; List<Tuple> tuples; StreamContext streamContext = new StreamContext(); SolrClientCache solrClientCache = new SolrClientCache(); streamContext.setSolrClientCache(solrClientCache); try { // Basic test expression = "innerJoin(" + " select(" + " search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\")," + " personId_i as personId," + " rating_f as rating" + " )," + " select(" + " jdbc(fetchSize=300, connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"ID asc\")," + " ID as personId," + " NAME as personName," + " COUNTRY_NAME as country" + " )," + " on=\"personId\"" + ")"; stream = 
factory.constructStream(expression); String expr = ((Expressible) stream).toExpression(factory).toString(); assertTrue(expr.contains("fetchSize=300")); stream.setStreamContext(streamContext); tuples = getTuples(stream); assertEquals(10, tuples.size()); assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20); assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d); assertOrderOf( tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha"); assertOrderOf( tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", "United States"); } finally { solrClientCache.close(); } } @Test public void testJDBCSolrInnerJoinExpressionWithProperties() throws Exception { StreamFactory factory = new StreamFactory() .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()) .withFunctionName("search", CloudSolrStream.class) .withFunctionName("select", SelectStream.class) .withFunctionName("innerJoin", InnerJoinStream.class) .withFunctionName("jdbc", JDBCStream.class); // Load Database Data try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) 
values (13,'Hailey','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')"); statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (18,'Natalie','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')"); } // Load solr data new UpdateRequest() .add(id, "1", "rating_f", "3.5", "personId_i", "11") .add(id, "2", "rating_f", "5", "personId_i", "12") .add(id, "3", "rating_f", "2.2", "personId_i", "13") .add(id, "4", "rating_f", "4.3", "personId_i", "14") .add(id, "5", "rating_f", "3.5", "personId_i", "15") .add(id, "6", "rating_f", "3", "personId_i", "16") .add(id, "7", "rating_f", "3", "personId_i", "17") .add(id, "8", "rating_f", "4", "personId_i", "18") .add(id, "9", "rating_f", "4.1", "personId_i", "19") .add(id, "10", "rating_f", "4.8", "personId_i", "20") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); String expression; TupleStream stream; List<Tuple> tuples; StreamContext streamContext = new StreamContext(); SolrClientCache solrClientCache = new SolrClientCache(); streamContext.setSolrClientCache(solrClientCache); try { // Basic test for no alias expression = "innerJoin(" + " select(" + " search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\")," + " personId_i as personId," + " rating_f as rating" + " )," + " select(" + " jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by 
PEOPLE.ID\", sort=\"ID asc\")," + " ID as personId," + " NAME as personName," + " COUNTRY_NAME as country" + " )," + " on=\"personId\"" + ")"; stream = factory.constructStream(expression); stream.setStreamContext(streamContext); tuples = getTuples(stream); assertEquals(10, tuples.size()); assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20); assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d); assertOrderOf( tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha"); assertOrderOf( tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", "United States"); // Basic test for alias expression = "innerJoin(" + " select(" + " search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\")," + " personId_i as personId," + " rating_f as rating" + " )," + " select(" + " jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID as PERSONID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by PEOPLE.ID\", sort=\"PERSONID asc\")," + " PERSONID as personId," + " NAME as personName," + " COUNTRY_NAME as country" + " )," + " on=\"personId\"" + ")"; stream = factory.constructStream(expression); stream.setStreamContext(streamContext); tuples = getTuples(stream); assertEquals(10, tuples.size()); assertOrderOf(tuples, "personId", 11, 12, 13, 14, 15, 16, 17, 18, 19, 20); assertOrderOf(tuples, "rating", 3.5d, 5d, 2.2d, 4.3d, 3.5d, 3d, 3d, 4d, 4.1d, 4.8d); assertOrderOf( tuples, "personName", "Emma", "Grace", "Hailey", "Isabella", "Lily", "Madison", "Mia", "Natalie", "Olivia", "Samantha"); assertOrderOf( tuples, "country", "Netherlands", "United States", "Netherlands", "Netherlands", "Netherlands", "United States", "United States", "Netherlands", "Netherlands", 
"United States"); } finally { solrClientCache.close(); } } @Test public void testJDBCSolrInnerJoinRollupExpression() throws Exception { StreamFactory factory = new StreamFactory() .withCollectionZkHost(COLLECTIONORALIAS, cluster.getZkServer().getZkAddress()) .withFunctionName("search", CloudSolrStream.class) .withFunctionName("select", SelectStream.class) .withFunctionName("hashJoin", HashJoinStream.class) .withFunctionName("rollup", RollupStream.class) .withFunctionName("jdbc", JDBCStream.class) .withFunctionName("max", MaxMetric.class) .withFunctionName("min", MinMetric.class) .withFunctionName("avg", MeanMetric.class) .withFunctionName("count", CountMetric.class); // Load Database Data try (Connection connection = DriverManager.getConnection("jdbc:hsqldb:mem:."); Statement statement = connection.createStatement()) { statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('US', 'United States')"); statement.executeUpdate( "insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NL', 'Netherlands')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NP', 'Nepal')"); statement.executeUpdate("insert into COUNTRIES (CODE,COUNTRY_NAME) values ('NO', 'Norway')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (11,'Emma','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (12,'Grace','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (13,'Hailey','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (14,'Isabella','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (15,'Lily','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (16,'Madison','US')"); statement.executeUpdate("insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (17,'Mia','US')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values 
(18,'Natalie','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (19,'Olivia','NL')"); statement.executeUpdate( "insert into PEOPLE (ID, NAME, COUNTRY_CODE) values (20,'Samantha','US')"); } // Load solr data new UpdateRequest() .add(id, "1", "rating_f", "3.5", "personId_i", "11") .add(id, "3", "rating_f", "2.2", "personId_i", "13") .add(id, "4", "rating_f", "4.3", "personId_i", "14") .add(id, "5", "rating_f", "3.5", "personId_i", "15") .add(id, "8", "rating_f", "4", "personId_i", "18") .add(id, "9", "rating_f", "4.1", "personId_i", "19") .add(id, "2", "rating_f", "5", "personId_i", "12") .add(id, "6", "rating_f", "3", "personId_i", "16") .add(id, "7", "rating_f", "3", "personId_i", "17") .add(id, "10", "rating_f", "4.8", "personId_i", "20") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); String expression; TupleStream stream; List<Tuple> tuples; StreamContext streamContext = new StreamContext(); SolrClientCache solrClientCache = new SolrClientCache(); streamContext.setSolrClientCache(solrClientCache); try { // Basic test expression = "rollup(" + " hashJoin(" + " hashed=select(" + " search(" + COLLECTIONORALIAS + ", fl=\"personId_i,rating_f\", q=\"rating_f:*\", sort=\"personId_i asc\")," + " personId_i as personId," + " rating_f as rating" + " )," + " select(" + " jdbc(connection=\"jdbc:hsqldb:mem:.\", sql=\"select PEOPLE.ID, PEOPLE.NAME, COUNTRIES.COUNTRY_NAME from PEOPLE inner join COUNTRIES on PEOPLE.COUNTRY_CODE = COUNTRIES.CODE order by COUNTRIES.COUNTRY_NAME\", sort=\"COUNTRIES.COUNTRY_NAME asc\")," + " ID as personId," + " NAME as personName," + " COUNTRY_NAME as country" + " )," + " on=\"personId\"" + " )," + " over=\"country\"," + " max(rating)," + " min(rating)," + " avg(rating)," + " count(*)" + ")"; stream = factory.constructStream(expression); stream.setStreamContext(streamContext); tuples = getTuples(stream); assertEquals(2, tuples.size()); Tuple tuple = tuples.get(0); assertEquals("Netherlands", 
tuple.getString("country")); assertEquals(4.3D, tuple.getDouble("max(rating)"), 0.0001); assertEquals(2.2D, tuple.getDouble("min(rating)"), 0.0001); assertEquals(3.6D, tuple.getDouble("avg(rating)"), 0.0001); assertEquals(6D, tuple.getDouble("count(*)"), 0.0001); tuple = tuples.get(1); assertEquals("United States", tuple.getString("country")); assertEquals(5D, tuple.getDouble("max(rating)"), 0.0001); assertEquals(3D, tuple.getDouble("min(rating)"), 0.0001); assertEquals(3.95D, tuple.getDouble("avg(rating)"), 0.0001); assertEquals(4D, tuple.getDouble("count(*)"), 0.0001); } finally { solrClientCache.close(); } } @Test(expected = IOException.class) public void testUnsupportedColumns() throws Exception { // No need to load table with any data TupleStream stream; // Simple 1 stream = new JDBCStream( "jdbc:hsqldb:mem:.", "select ID,UNSP from UNSUPPORTED_COLUMNS", new FieldComparator("CODE", ComparatorOrder.ASCENDING)); getTuples(stream); } protected List<Tuple> getTuples(TupleStream tupleStream) throws IOException { tupleStream.open(); List<Tuple> tuples = new ArrayList<>(); for (Tuple t = tupleStream.read(); !t.EOF; t = tupleStream.read()) { tuples.add(t); } tupleStream.close(); return tuples; } protected void assertOrderOf(List<Tuple> tuples, String fieldName, int... values) throws Exception { int i = 0; for (int val : values) { Tuple t = tuples.get(i); Long tip = (Long) t.get(fieldName); if (tip.intValue() != val) { throw new Exception("Found value:" + tip.intValue() + " expecting:" + val); } ++i; } } protected void assertOrderOf(List<Tuple> tuples, String fieldName, double... values) { int i = 0; for (double val : values) { Tuple t = tuples.get(i); double tip = (double) t.get(fieldName); assertEquals("Found value:" + tip + " expecting:" + val, val, tip, 0.00001); ++i; } } protected void assertOrderOf(List<Tuple> tuples, String fieldName, String... 
values) throws Exception { int i = 0; for (String val : values) { Tuple t = tuples.get(i); if (null == val) { if (null != t.get(fieldName)) { throw new Exception("Found value:" + t.get(fieldName) + " expecting:null"); } } else { String tip = (String) t.get(fieldName); if (!tip.equals(val)) { throw new Exception("Found value:" + tip + " expecting:" + val); } } ++i; } } public boolean assertLong(Tuple tuple, String fieldName, long l) throws Exception { long lv = (long) tuple.get(fieldName); if (lv != l) { throw new Exception("Longs not equal:" + l + " : " + lv); } return true; } public boolean assertDouble(Tuple tuple, String fieldName, double d) throws Exception { double dv = (double) tuple.get(fieldName); if (dv != d) { throw new Exception("Doubles not equal:" + d + " : " + dv); } return true; } public boolean assertString(Tuple tuple, String fieldName, String expected) throws Exception { String actual = (String) tuple.get(fieldName); if ((null == expected && null != actual) || (null != expected && !expected.equals(actual))) { throw new Exception("Longs not equal:" + expected + " : " + actual); } return true; } }
apache/oozie
35,686
core/src/main/java/org/apache/oozie/command/wf/SignalXCommand.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.command.wf; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.oozie.action.ActionExecutor; import org.apache.oozie.action.control.ForkActionExecutor; import org.apache.oozie.action.control.StartActionExecutor; import org.apache.oozie.action.oozie.SubWorkflowActionExecutor; import org.apache.oozie.client.Job; import org.apache.oozie.client.WorkflowAction; import org.apache.oozie.client.WorkflowJob; import org.apache.oozie.client.SLAEvent.SlaAppType; import org.apache.oozie.client.SLAEvent.Status; import org.apache.oozie.client.rest.JsonBean; import org.apache.oozie.SLAEventBean; import org.apache.oozie.WorkflowActionBean; import org.apache.oozie.WorkflowJobBean; import org.apache.oozie.ErrorCode; import org.apache.oozie.XException; import org.apache.oozie.command.CommandException; import org.apache.oozie.command.PreconditionException; import org.apache.oozie.command.wf.ActionXCommand.ActionExecutorContext; import org.apache.oozie.command.wf.ActionXCommand.ForkedActionExecutorContext; import org.apache.oozie.executor.jpa.BatchQueryExecutor.UpdateEntry; import org.apache.oozie.executor.jpa.BatchQueryExecutor; import 
org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowActionQueryExecutor.WorkflowActionQuery;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor;
import org.apache.oozie.executor.jpa.WorkflowJobQueryExecutor.WorkflowJobQuery;
import org.apache.oozie.service.ActionService;
import org.apache.oozie.service.CallableQueueService;
import org.apache.oozie.service.CallableQueueService.CallableWrapper;
import org.apache.oozie.service.ConfigurationService;
import org.apache.oozie.service.ELService;
import org.apache.oozie.service.EventHandlerService;
import org.apache.oozie.service.JPAService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.UUIDService;
import org.apache.oozie.service.WorkflowStoreService;
import org.apache.oozie.workflow.WorkflowException;
import org.apache.oozie.workflow.WorkflowInstance;
import org.apache.oozie.workflow.lite.KillNodeDef;
import org.apache.oozie.workflow.lite.NodeDef;
import org.apache.oozie.util.ELEvaluator;
import org.apache.oozie.util.InstrumentUtils;
import org.apache.oozie.util.LogUtils;
import org.apache.oozie.util.XConfiguration;
import org.apache.oozie.util.ParamChecker;
import org.apache.oozie.util.XmlUtils;
import org.apache.oozie.util.db.SLADbXOperations;
import org.jdom2.Element;

import java.io.StringReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;

import org.apache.oozie.client.OozieClient;

/**
 * Command that signals a workflow job (or one of its actions) to transition
 * the workflow's state machine.
 *
 * <p>When constructed with only a job id, it starts a {@code PREP} workflow
 * instance. When constructed with a job id and an action id, it signals the
 * completed action's transition and then materializes/starts the next set of
 * actions returned by the workflow instance.
 *
 * <p>All DB writes are accumulated into {@link #insertList} / {@link #updateList}
 * and flushed in a single batch via {@code BatchQueryExecutor}; follow-up work
 * (notifications, kills, next action starts) is queued as further XCommands.
 */
@SuppressWarnings("deprecation")
public class SignalXCommand extends WorkflowXCommand<Void> {

    // JPA access handle, resolved in loadState() from the Services singleton.
    private JPAService jpaService = null;
    // Id of the workflow job being signalled (always set).
    private String jobId;
    // Id of the completed action triggering this signal; null on initial job start.
    private String actionId;
    // Loaded workflow job bean (loadState()).
    private WorkflowJobBean wfJob;
    // Loaded action bean for actionId, or null when signalling job start.
    private WorkflowActionBean wfAction;
    // Accumulated JPA updates, flushed in one batch at the end of execute().
    private List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
    // Accumulated JPA inserts (new actions, SLA events), flushed with updateList.
    private List<JsonBean> insertList = new ArrayList<JsonBean>();
    // Whether a job-status-change event should be emitted after the batch write.
    private boolean generateEvent = false;
    // Error code/message propagated into the generated event on failure.
    private String wfJobErrorCode;
    private String wfJobErrorMsg;
    // Config switch: submit forked actions in parallel instead of queueing them.
    public final static String FORK_PARALLEL_JOBSUBMISSION = "oozie.workflow.parallel.fork.action.start";

    /**
     * Base constructor.
     *
     * @param name command name (also used as the command type).
     * @param priority queue priority.
     * @param jobId workflow job id, must be non-empty.
     */
    public SignalXCommand(String name, int priority, String jobId) {
        super(name, name, priority);
        this.jobId = ParamChecker.notEmpty(jobId, "jobId");
    }

    /**
     * Creates a "signal" command (priority 1) for a completed action.
     *
     * @param jobId workflow job id, must be non-empty.
     * @param actionId id of the action whose completion is being signalled, must be non-empty.
     */
    public SignalXCommand(String jobId, String actionId) {
        this("signal", 1, jobId);
        this.actionId = ParamChecker.notEmpty(actionId, "actionId");
    }

    /** Seeds the log context with the job id, falling back to the action id. */
    @Override
    protected void setLogInfo() {
        if (jobId != null) {
            LogUtils.setLogInfo(jobId);
        }
        else if (actionId !=null) {
            LogUtils.setLogInfo(actionId);
        }
    }

    /** Signals mutate job state, so a lock on the entity key is required. */
    @Override
    protected boolean isLockRequired() {
        return true;
    }

    /** Lock/entity key is the workflow job id. */
    @Override
    public String getEntityKey() {
        return this.jobId;
    }

    /** Unique key for de-duplication: command name + job id + action id. */
    @Override
    public String getKey() {
        return getName() + "_" + jobId + "_" + actionId;
    }

    /**
     * Loads the workflow job bean and, when an action id was given, the action
     * bean, refreshing the log context for each.
     *
     * @throws CommandException E0610 if the JPA service is unavailable, or
     *         wrapping any {@code XException} raised by the query executors.
     */
    @Override
    protected void loadState() throws CommandException {
        try {
            jpaService = Services.get().get(JPAService.class);
            if (jpaService != null) {
                this.wfJob = WorkflowJobQueryExecutor.getInstance().get(WorkflowJobQuery.GET_WORKFLOW, jobId);
                LogUtils.setLogInfo(wfJob);
                if (actionId != null) {
                    this.wfAction = WorkflowActionQueryExecutor.getInstance()
                            .get(WorkflowActionQuery.GET_ACTION_SIGNAL, actionId);
                    LogUtils.setLogInfo(wfAction);
                }
            }
            else {
                throw new CommandException(ErrorCode.E0610);
            }
        }
        catch (XException ex) {
            throw new CommandException(ex);
        }
    }

    /**
     * Verifies the signal is applicable: either this is a job-start signal
     * (no action) or the action is complete-and-pending, and the job itself is
     * RUNNING or PREP.
     *
     * @throws PreconditionException E0813 for a bad job status (unless a
     *         sibling forked action already failed, see below), E0814 for a
     *         bad action status.
     */
    @Override
    protected void verifyPrecondition() throws CommandException, PreconditionException {
        if ((wfAction == null) || (wfAction.isComplete() && wfAction.isPending())) {
            if (wfJob.getStatus() != WorkflowJob.Status.RUNNING && wfJob.getStatus() != WorkflowJob.Status.PREP) {
                // In case of forked actions there might be a case when an action - running in parallel - fails.
                // In that case in the same fork, an other running action would not pass the precondition
                // check, as the workflow job itself gets failed as well because of the other action's failure.
                // This behaviour leads to the incidence that the action will stick in RUNNING phase.
                // Hence the below method is responsible for recognizing those scenarios.
                // If there is an (other) action which's status is FAILED n in the same workflow job of this action
                // to be checked, then it means this action was launched in parallel (with that other action),
                // because otherwise the workflow job would not have transitioned to this action due to the
                // other workflow's failure.
                if (isOtherActionFailedUnderJob(wfJob, wfAction)) {
                    // Skipping throwing exception, therefore preventing this action to be stuck in RUNNING phase
                    return;
                }
                throw new PreconditionException(ErrorCode.E0813, wfJob.getStatusStr());
            }
        }
        else {
            throw new PreconditionException(ErrorCode.E0814, actionId, wfAction.getStatusStr(), wfAction.isPending());
        }
    }

    /**
     * Core state-machine step. Either starts a PREP workflow or signals the
     * completed action, then:
     * <ul>
     *   <li>if the instance completed: kills/fails outstanding actions,
     *       finalizes job status/end time, records SLA events, and resolves a
     *       Kill-node message when applicable;</li>
     *   <li>otherwise: materializes the next actions to start, honoring skip
     *       vars (rerun), suspend-on-nodes config, and the parallel-fork
     *       submission switch.</li>
     * </ul>
     * All accumulated inserts/updates are then flushed in one JPA batch, and
     * the next action is started synchronously (or forked actions are
     * submitted) to avoid queueing latency.
     *
     * @return always {@code null}.
     * @throws CommandException on workflow or JPA errors.
     */
    @Override
    protected Void execute() throws CommandException {
        LOG.debug("STARTED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
        WorkflowInstance workflowInstance = wfJob.getWorkflowInstance();
        workflowInstance.setTransientVar(WorkflowStoreService.WORKFLOW_BEAN, wfJob);
        WorkflowJob.Status prevStatus = wfJob.getStatus();
        boolean completed = false, skipAction = false;
        // Action to be started synchronously at the end (in-thread, not queued).
        WorkflowActionBean syncAction = null;
        // Forked actions collected for parallel submission (see FORK_PARALLEL_JOBSUBMISSION).
        List<WorkflowActionBean> workflowActionBeanListForForked = new ArrayList<WorkflowActionBean>();
        if (wfAction == null) {
            // Job-start signal: only valid from PREP.
            if (wfJob.getStatus() == WorkflowJob.Status.PREP) {
                try {
                    completed = workflowInstance.start();
                }
                catch (WorkflowException e) {
                    throw new CommandException(e);
                }
                wfJob.setStatus(WorkflowJob.Status.RUNNING);
                wfJob.setStartTime(new Date());
                wfJob.setWorkflowInstance(workflowInstance);
                generateEvent = true;
                // 1. Add SLA status event for WF-JOB with status STARTED
                SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, Status.STARTED,
                        SlaAppType.WORKFLOW_JOB);
                if (slaEvent != null) {
                    insertList.add(slaEvent);
                }
                // 2. Add SLA registration events for all WF_ACTIONS
                createSLARegistrationForAllActions(workflowInstance.getApp().getDefinition(), wfJob.getUser(),
                        wfJob.getGroup(), wfJob.getConf());
                queue(new WorkflowNotificationXCommand(wfJob));
            }
            else {
                throw new CommandException(ErrorCode.E0801, wfJob.getId());
            }
        }
        else {
            // Action-completion signal: advance the instance past this action.
            WorkflowInstance.Status initialStatus = workflowInstance.getStatus();
            String skipVar = workflowInstance.getVar(wfAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR
                    + ReRunXCommand.TO_SKIP);
            if (skipVar != null) {
                skipAction = skipVar.equals("true");
            }
            try {
                completed = workflowInstance.signal(wfAction.getExecutionPath(), wfAction.getSignalValue());
            }
            catch (WorkflowException e) {
                // Signal failure finalizes the job with the instance's status.
                LOG.error("Workflow action failed : " + e.getMessage(), e);
                wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
                completed = true;
            }
            wfJob.setWorkflowInstance(workflowInstance);
            wfAction.resetPending();
            if (!skipAction) {
                wfAction.setTransition(workflowInstance.getTransition(wfAction.getName()));
                queue(new WorkflowNotificationXCommand(wfJob, wfAction));
            }
            updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS,
                    wfAction));
            WorkflowInstance.Status endStatus = workflowInstance.getStatus();
            if (endStatus != initialStatus) {
                generateEvent = true;
            }
        }
        if (completed) {
            // Workflow instance reached a terminal state: clean up remaining actions.
            try {
                for (String actionToKillId : WorkflowStoreService.getActionsToKill(workflowInstance)) {
                    WorkflowActionBean actionToKill;
                    actionToKill = WorkflowActionQueryExecutor.getInstance().get(
                            WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, actionToKillId);
                    actionToKill.setPending();
                    actionToKill.setStatus(WorkflowActionBean.Status.KILLED);
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(
                            WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToKill));
                    queue(new ActionKillXCommand(actionToKill.getId(), actionToKill.getType()));
                }
                for (String actionToFailId : WorkflowStoreService.getActionsToFail(workflowInstance)) {
                    WorkflowActionBean actionToFail = WorkflowActionQueryExecutor.getInstance().get(
                            WorkflowActionQuery.GET_ACTION_FAIL, actionToFailId);
                    actionToFail.resetPending();
                    actionToFail.setStatus(WorkflowActionBean.Status.FAILED);
                    // NOTE(review): this guard reads wfJobErrorCode != null before assigning it,
                    // so the error code/message are only captured when one was already set —
                    // looks inverted (!= vs ==); confirm against upstream history before changing.
                    if (wfJobErrorCode != null) {
                        wfJobErrorCode = actionToFail.getErrorCode();
                        wfJobErrorMsg = actionToFail.getErrorMessage();
                    }
                    queue(new WorkflowNotificationXCommand(wfJob, actionToFail));
                    SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfAction.getSlaXml(), wfAction.getId(),
                            Status.FAILED, SlaAppType.WORKFLOW_ACTION);
                    if (slaEvent != null) {
                        insertList.add(slaEvent);
                    }
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(
                            WorkflowActionQuery.UPDATE_ACTION_STATUS_PENDING, actionToFail));
                }
            }
            catch (JPAExecutorException je) {
                throw new CommandException(je);
            }
            wfJob.setStatus(WorkflowJob.Status.valueOf(workflowInstance.getStatus().toString()));
            wfJob.setEndTime(new Date());
            wfJob.setWorkflowInstance(workflowInstance);
            // Map the terminal job status onto the SLA status event.
            Status slaStatus = Status.SUCCEEDED;
            switch (wfJob.getStatus()) {
                case SUCCEEDED:
                    slaStatus = Status.SUCCEEDED;
                    break;
                case KILLED:
                    slaStatus = Status.KILLED;
                    break;
                case FAILED:
                    slaStatus = Status.FAILED;
                    break;
                default: // TODO SUSPENDED
                    break;
            }
            SLAEventBean slaEvent = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), jobId, slaStatus,
                    SlaAppType.WORKFLOW_JOB);
            if (slaEvent != null) {
                insertList.add(slaEvent);
            }
            queue(new WorkflowNotificationXCommand(wfJob));
            if (wfJob.getStatus() == WorkflowJob.Status.SUCCEEDED) {
                InstrumentUtils.incrJobCounter(INSTR_SUCCEEDED_JOBS_COUNTER_NAME, 1, getInstrumentation());
            }
            // output message for Kill node
            if (wfAction != null) { // wfAction could be a no-op job
                NodeDef nodeDef = workflowInstance.getNodeDef(wfAction.getExecutionPath());
                if (nodeDef != null && nodeDef instanceof KillNodeDef) {
                    boolean isRetry = false;
                    boolean isUserRetry = false;
                    ActionExecutorContext context = new ActionXCommand.ActionExecutorContext(wfJob, wfAction, isRetry,
                            isUserRetry);
                    InstrumentUtils.incrJobCounter(INSTR_KILLED_JOBS_COUNTER_NAME, 1, getInstrumentation());
                    try {
                        // Resolve the kill node's message through EL before storing it as error info.
                        String tmpNodeConf = nodeDef.getConf();
                        String message = context.getELEvaluator().evaluate(tmpNodeConf, String.class);
                        LOG.debug(
                                "Try to resolve KillNode message for jobid [{0}], actionId [{1}], before resolve [{2}], "
                                        + "after resolve [{3}]", jobId, actionId, tmpNodeConf, message);
                        if (wfAction.getErrorCode() != null) {
                            wfAction.setErrorInfo(wfAction.getErrorCode(), message);
                        }
                        else {
                            wfAction.setErrorInfo(ErrorCode.E0729.toString(), message);
                        }
                    }
                    catch (Exception ex) {
                        LOG.warn("Exception in SignalXCommand when processing Kill node message: {0}",
                                ex.getMessage(), ex);
                        wfAction.setErrorInfo(ErrorCode.E0756.toString(), ErrorCode.E0756.format(ex.getMessage()));
                        wfAction.setStatus(WorkflowAction.Status.ERROR);
                    }
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(
                            WorkflowActionQuery.UPDATE_ACTION_PENDING_TRANS_ERROR, wfAction));
                }
            }
        }
        else {
            // Workflow still running: materialize and start the next actions.
            for (WorkflowActionBean newAction : WorkflowStoreService.getActionsToStart(workflowInstance)) {
                boolean isOldWFAction = false;
                // In case of subworkflow rerun when failed option have been provided, rerun command do not delete
                // old action. To avoid twice entry for same action, Checking in Db if the workflow action already exist.
                if(SubWorkflowActionExecutor.ACTION_TYPE.equals(newAction.getType())) {
                    try {
                        WorkflowActionBean oldAction = WorkflowActionQueryExecutor.getInstance()
                                .get(WorkflowActionQuery.GET_ACTION_CHECK, newAction.getId());
                        newAction.setExternalId(oldAction.getExternalId());
                        newAction.setCreatedTime(oldAction.getCreatedTime());
                        isOldWFAction = true;
                    }
                    catch (JPAExecutorException e) {
                        // E0605 = action not found, which is the normal (non-rerun) path.
                        if(e.getErrorCode() != ErrorCode.E0605) {
                            throw new CommandException(e);
                        }
                    }
                }
                String skipVar = workflowInstance.getVar(newAction.getName() + WorkflowInstance.NODE_VAR_SEPARATOR
                        + ReRunXCommand.TO_SKIP);
                boolean skipNewAction = false, suspendNewAction = false;
                if (skipVar != null) {
                    skipNewAction = skipVar.equals("true");
                }
                if (skipNewAction) {
                    // Rerun skip: don't execute, just mark pending and signal past it.
                    WorkflowActionBean oldAction = new WorkflowActionBean();
                    oldAction.setId(newAction.getId());
                    oldAction.setPending();
                    oldAction.setExecutionPath(newAction.getExecutionPath());
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_PENDING,
                            oldAction));
                    queue(new SignalXCommand(jobId, oldAction.getId()));
                }
                else {
                    if(!skipAction) {
                        try {
                            // Make sure that transition node for a forked action
                            // is inserted only once
                            WorkflowActionQueryExecutor.getInstance().get(
                                    WorkflowActionQuery.GET_ACTION_ID_TYPE_LASTCHECK, newAction.getId());
                            if (!SubWorkflowActionExecutor.ACTION_TYPE.equals(newAction.getType())) {
                                continue;
                            }
                        }
                        catch (JPAExecutorException jee) {
                            // Action not in DB yet: fall through and insert it below.
                        }
                    }
                    suspendNewAction = checkForSuspendNode(newAction);
                    newAction.setPending();
                    String actionSlaXml = getActionSLAXml(newAction.getName(), workflowInstance.getApp()
                            .getDefinition(), wfJob.getConf());
                    newAction.setSlaXml(actionSlaXml);
                    if(!isOldWFAction) {
                        newAction.setCreatedTime(new Date());
                        insertList.add(newAction);
                    }
                    else {
                        updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START,
                                newAction));
                    }
                    LOG.debug("SignalXCommand: Name: " + newAction.getName() + ", Id: " + newAction.getId()
                            + ", Authcode:" + newAction.getCred());
                    if (wfAction != null) { // null during wf job submit
                        ActionService as = Services.get().get(ActionService.class);
                        ActionExecutor current = as.getExecutor(wfAction.getType());
                        LOG.trace("Current Action Type:" + current.getClass());
                        if (!suspendNewAction) {
                            if (current instanceof StartActionExecutor) {
                                // Excluding :start: here from executing first action synchronously since it
                                // blocks the consumer thread till the action is submitted to Hadoop,
                                // in turn reducing the number of new submissions the threads can accept.
                                // Would also be susceptible to longer delays in case Hadoop cluster is busy.
                                queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                            }
                            else if (current instanceof ForkActionExecutor) {
                                if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                                    workflowActionBeanListForForked.add(newAction);
                                }
                                else {
                                    queue(new ActionStartXCommand(newAction.getId(), newAction.getType()));
                                }
                            }
                            else {
                                syncAction = newAction;
                            }
                        }
                        else {
                            // suspend check will happen later... where if one of action is suspended all forked action
                            // will be ignored.
                            if (ConfigurationService.getBoolean(SignalXCommand.FORK_PARALLEL_JOBSUBMISSION)) {
                                workflowActionBeanListForForked.add(newAction);
                            }
                        }
                    }
                    else {
                        syncAction = newAction; // first action after wf submit should always be sync
                    }
                }
            }
        }
        try {
            wfJob.setLastModifiedTime(new Date());
            updateList.add(new UpdateEntry<WorkflowJobQuery>(
                    WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MOD_START_END, wfJob));
            // call JPAExecutor to do the bulk writes
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
            if (prevStatus != wfJob.getStatus()) {
                LOG.debug("Updated the workflow status to " + wfJob.getId() + "  status =" + wfJob.getStatusStr());
            }
            if (generateEvent && EventHandlerService.isEnabled()) {
                generateEvent(wfJob, wfJobErrorCode, wfJobErrorMsg);
            }
        }
        catch (JPAExecutorException je) {
            throw new CommandException(je);
        }
        // Changing to synchronous call from asynchronous queuing to prevent
        // undue delay from between end of previous and start of next action
        if (wfJob.getStatus() != WorkflowJob.Status.RUNNING && wfJob.getStatus() != WorkflowJob.Status.SUSPENDED) {
            // only for asynchronous actions, parent coord action's external id will
            // persisted and following update will succeed.
            updateParentIfNecessary(wfJob);
            new WfEndXCommand(wfJob).call(); // To delete the WF temp dir
        }
        else if (syncAction != null) {
            new ActionStartXCommand(wfJob, syncAction.getId(), syncAction.getType()).call();
        }
        else if (!workflowActionBeanListForForked.isEmpty() && !checkForSuspendNode(workflowActionBeanListForForked)) {
            startForkedActions(workflowActionBeanListForForked);
        }
        LOG.debug("ENDED SignalCommand for jobid=" + jobId + ", actionId=" + actionId);
        return null;
    }

    /**
     * Submits the actions of a fork in parallel through the callable queue,
     * waiting for their results, and falls back to plain queueing when not
     * enough idle queue threads are available or any error occurs.
     *
     * <p>Failed/suspended forked actions are handled inline (fail job / manual
     * start), and the workflow is ended when any context requests it. Job last
     * modified time and accumulated DB writes are flushed at the end.
     *
     * @param workflowActionBeanListForForked the forked actions to start.
     * @throws CommandException on JPA batch-write failure.
     */
    public void startForkedActions(List<WorkflowActionBean> workflowActionBeanListForForked) throws CommandException {

        List<UpdateEntry> updateList = new ArrayList<UpdateEntry>();
        List<JsonBean> insertList = new ArrayList<JsonBean>();

        boolean endWorkflow = false;
        boolean submitJobByQueuing = false;
        try {
            /*
             * The limited thread execution mechanism aims to solve the dead-lock when all active threads are
             * executing the SignalXCommand's invokeAll method.
             *
             * Solution
             * 1. Need to limit directly invokeAll call when the num of rest threads is less than the tasks
             * 2. To obtain correct active threads number in callableQueue, the SignalXCommand.class lock is needed.
             */
            CallableQueueService callableQueueService = Services.get().get(CallableQueueService.class);
            List<Future<ActionExecutorContext>> futures = new ArrayList<>();
            synchronized (SignalXCommand.class) {
                long limitedRestThreadNum = callableQueueService.getQueueThreadsNumber()
                        - callableQueueService.getThreadActiveCount();
                if (limitedRestThreadNum < workflowActionBeanListForForked.size()) {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Limited callable queue rest threads number: " + limitedRestThreadNum
                                + ", needed forked task size: " + workflowActionBeanListForForked.size()
                                + ", tasks will be submitted to queue by async mode.");
                    }
                    submitJobByQueuing = true;
                }
                else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Starting forked actions parallely: " + workflowActionBeanListForForked);
                    }
                    for (WorkflowActionBean workflowActionBean : workflowActionBeanListForForked) {
                        futures.add(
                                callableQueueService.submit(callableQueueService.new CallableWrapper<ActionExecutorContext>(
                                        new ForkedActionStartXCommand(wfJob, workflowActionBean.getId(),
                                                workflowActionBean.getType()), 0))
                        );
                    }
                    long startTime = System.currentTimeMillis();
                    // Block until all forked action commands have completed.
                    callableQueueService.blockingWait(futures);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Execution time of forked actions parallely: "
                                + (System.currentTimeMillis() - startTime) / 1000 + " sec");
                    }
                }
            }
            for (Future<ActionExecutorContext> result : futures) {
                if (result == null) {
                    // Missing future: fall back to async queueing for safety.
                    submitJobByQueuing = true;
                    continue;
                }
                ActionExecutorContext context = result.get();
                // Copy the forked context's variables back onto the workflow.
                Map<String, String> contextVariableMap = ((ForkedActionExecutorContext) context).getContextMap();
                LOG.debug("contextVariableMap size of action " + context.getAction().getId() + " is "
                        + contextVariableMap.size());
                for (String key : contextVariableMap.keySet()) {
                    context.setVarToWorkflow(key, contextVariableMap.get(key));
                }
                if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.FAILED)) {
                    LOG.warn("Action has failed, failing job" + context.getAction().getId());
                    new ActionStartXCommand(context.getAction().getId(), null).failJob(context);
                    // Fork out more than one transitions, one should be transitions,
                    // one submit fail can't execute KillXCommand
                    queue(new KillXCommand(context.getWorkflow().getId()));
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START,
                            (WorkflowActionBean) context.getAction()));
                    if (context.isShouldEndWF()) {
                        endWorkflow = true;
                    }
                }
                if (context.getJobStatus() != null && context.getJobStatus().equals(Job.Status.SUSPENDED)) {
                    LOG.warn("Action has failed, failing job" + context.getAction().getId());
                    new ActionStartXCommand(context.getAction().getId(), null).handleNonTransient(context, null,
                            WorkflowAction.Status.START_MANUAL);
                    updateList.add(new UpdateEntry<WorkflowActionQuery>(WorkflowActionQuery.UPDATE_ACTION_START,
                            (WorkflowActionBean) context.getAction()));
                    if (context.isShouldEndWF()) {
                        endWorkflow = true;
                    }
                }
            }
            if (endWorkflow) {
                endWF(insertList);
            }
        }
        catch (Exception e) {
            // Any failure in parallel mode degrades to async queueing of all actions.
            LOG.error("Error running forked jobs parallely", e);
            startForkedActionsByQueuing(workflowActionBeanListForForked);
            submitJobByQueuing = false;
        }
        if (submitJobByQueuing && !endWorkflow) {
            LOG.error("There is error in running forked jobs parallely");
            startForkedActionsByQueuing(workflowActionBeanListForForked);
        }
        wfJob.setLastModifiedTime(new Date());
        updateList.add(new UpdateEntry<WorkflowJobQuery>(WorkflowJobQuery.UPDATE_WORKFLOW_STATUS_INSTANCE_MODIFIED,
                wfJob));
        try {
            BatchQueryExecutor.getInstance().executeBatchInsertUpdateDelete(insertList, updateList, null);
        }
        catch (JPAExecutorException e) {
            throw new CommandException(e);
        }
        LOG.debug("forked actions submitted parallely");
    }

    /**
     * Fallback path: queue an {@code ActionStartXCommand} for each forked
     * action instead of submitting them in parallel.
     *
     * @param workflowActionBeanListForForked the forked actions to queue.
     */
    public void startForkedActionsByQueuing(List<WorkflowActionBean> workflowActionBeanListForForked)
            throws CommandException {
        //queuing all jobs, submitted job will fail in precondition
        for (WorkflowActionBean workflowActionBean : workflowActionBeanListForForked) {
            LOG.debug("Queuing fork action " + workflowActionBean.getId());
            queue(new ActionStartXCommand(workflowActionBean.getId(), workflowActionBean.getType()));
        }
    }

    /**
     * Ends the workflow after a forked-action failure: updates the parent,
     * deletes the WF temp dir, and records a FAILED SLA status event.
     *
     * @param insertList receives the SLA event to be batch-inserted by the caller.
     */
    private void endWF(List<JsonBean> insertList) throws CommandException {
        updateParentIfNecessary(wfJob, 3);
        new WfEndXCommand(wfJob).call(); // To delete the WF temp dir
        SLAEventBean slaEvent2 = SLADbXOperations.createStatusEvent(wfJob.getSlaXml(), wfJob.getId(), Status.FAILED,
                SlaAppType.WORKFLOW_JOB);
        if (slaEvent2 != null) {
            insertList.add(slaEvent2);
        }
    }

    /**
     * Creates an EL evaluator for the given group with every entry of the
     * configuration exposed as an EL variable.
     *
     * @param conf configuration whose entries seed the evaluator's variables.
     * @param group EL group name to create the evaluator for.
     * @return the populated evaluator.
     */
    public static ELEvaluator createELEvaluatorForGroup(Configuration conf, String group) {
        ELEvaluator eval = Services.get().get(ELService.class).createEvaluator(group);
        for (Map.Entry<String, String> entry : conf) {
            eval.setVariable(entry.getKey(), entry.getValue());
        }
        return eval;
    }

    /**
     * Extracts the SLA XML snippet for a named action from the workflow
     * definition XML.
     *
     * @param actionName name of the action to look up.
     * @param wfXml the workflow definition XML.
     * @param wfConf the workflow configuration (currently unused here).
     * @return pretty-printed SLA XML for the action, or {@code null} when the
     *         action has no SLA element.
     * @throws CommandException E1004 on any parse error.
     */
    @SuppressWarnings("unchecked")
    private String getActionSLAXml(String actionName, String wfXml, String wfConf) throws CommandException {
        String slaXml = null;
        try {
            Element eWfJob = XmlUtils.parseXml(wfXml);
            for (Element action : (List<Element>) eWfJob.getChildren("action", eWfJob.getNamespace())) {
                if (action.getAttributeValue("name").equals(actionName) == false) {
                    continue;
                }
                Element eSla = XmlUtils.getSLAElement(action);
                if (eSla != null) {
                    slaXml = XmlUtils.prettyPrint(eSla).toString();
                    break;
                }
            }
        }
        catch (Exception e) {
            throw new CommandException(ErrorCode.E1004, e.getMessage(), e);
        }
        return slaXml;
    }

    /**
     * Resolves EL expressions inside an SLA element using the "wf-sla-submit"
     * evaluator group.
     *
     * @param eSla the SLA element to resolve.
     * @param conf configuration backing the EL variables.
     * @return the resolved SLA XML.
     * @throws CommandException E1004 on any resolution error.
     */
    private String resolveSla(Element eSla, Configuration conf) throws CommandException {
        String slaXml = null;
        try {
            ELEvaluator evalSla = SubmitXCommand.createELEvaluatorForGroup(conf, "wf-sla-submit");
            slaXml = SubmitXCommand.resolveSla(eSla, evalSla);
        }
        catch (Exception e) {
            throw new CommandException(ErrorCode.E1004, e.getMessage(), e);
        }
        return slaXml;
    }

    /**
     * Registers an SLA event for every action in the workflow definition that
     * declares an SLA element, appending the events to {@link #insertList}.
     *
     * @param wfXml the workflow definition XML.
     * @param user submitting user (recorded on the SLA registration).
     * @param group submitting group (recorded on the SLA registration).
     * @param strConf serialized workflow configuration used for EL resolution.
     * @throws CommandException E1007 on any parse/registration failure.
     */
    @SuppressWarnings("unchecked")
    private void createSLARegistrationForAllActions(String wfXml, String user, String group, String strConf)
            throws CommandException {
        try {
            Element eWfJob = XmlUtils.parseXml(wfXml);
            Configuration conf = new XConfiguration(new StringReader(strConf));
            for (Element action : (List<Element>) eWfJob.getChildren("action", eWfJob.getNamespace())) {
                Element eSla = XmlUtils.getSLAElement(action);
                if (eSla != null) {
                    String slaXml = resolveSla(eSla, conf);
                    eSla = XmlUtils.parseXml(slaXml);
                    String actionId = Services.get().get(UUIDService.class)
                            .generateChildId(jobId, action.getAttributeValue("name") + "");
                    SLAEventBean slaEvent = SLADbXOperations.createSlaRegistrationEvent(eSla, actionId,
                            SlaAppType.WORKFLOW_ACTION, user, group);
                    if (slaEvent != null) {
                        insertList.add(slaEvent);
                    }
                }
            }
        }
        catch (Exception e) {
            throw new CommandException(ErrorCode.E1007, "workflow:Actions " + jobId, e.getMessage(), e);
        }
    }

    /**
     * Checks whether the job config's suspend-on-nodes list covers the given
     * action ("*" matches every node) and, if so, queues a {@code SuspendXCommand}.
     *
     * @param newAction the action about to be started.
     * @return {@code true} when a suspend was triggered for this action.
     */
    private boolean checkForSuspendNode(WorkflowActionBean newAction) {
        boolean suspendNewAction = false;
        try {
            XConfiguration wfjobConf = new XConfiguration(new StringReader(wfJob.getConf()));
            String[] values = wfjobConf.getTrimmedStrings(OozieClient.OOZIE_SUSPEND_ON_NODES);
            if (values != null) {
                if (values.length == 1 && values[0].equals("*")) {
                    LOG.info("Reached suspend node at [{0}], suspending workflow [{1}]", newAction.getName(),
                            wfJob.getId());
                    queue(new SuspendXCommand(jobId));
                    suspendNewAction = true;
                }
                else {
                    for (String suspendPoint : values) {
                        if (suspendPoint.equals(newAction.getName())) {
                            LOG.info("Reached suspend node at [{0}], suspending workflow [{1}]", newAction.getName(),
                                    wfJob.getId());
                            queue(new SuspendXCommand(jobId));
                            suspendNewAction = true;
                            break;
                        }
                    }
                }
            }
        }
        catch (IOException ex) {
            // Best-effort: an unreadable job conf only disables the suspend check.
            LOG.warn("Error reading " + OozieClient.OOZIE_SUSPEND_ON_NODES + ", ignoring [{0}]", ex.getMessage());
        }
        return suspendNewAction;
    }

    /**
     * Returns {@code true} if any action in the list triggers a suspend node
     * (queuing the suspend as a side effect of the per-action check).
     *
     * @param workflowActionBeanListForForked forked actions to check.
     */
    private boolean checkForSuspendNode(List<WorkflowActionBean> workflowActionBeanListForForked) {
        for(WorkflowActionBean bean :workflowActionBeanListForForked)
            if(checkForSuspendNode(bean)){
                return true;
            }
        return false;
    }
}
googleapis/google-cloud-java
35,285
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/ListSessionsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * List sessions response. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.ListSessionsResponse} */ public final class ListSessionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.ListSessionsResponse) ListSessionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSessionsResponse.newBuilder() to construct. 
private ListSessionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSessionsResponse() { sessions_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSessionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListSessionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.ServiceProto .internal_static_google_cloud_dataplex_v1_ListSessionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.ListSessionsResponse.class, com.google.cloud.dataplex.v1.ListSessionsResponse.Builder.class); } public static final int SESSIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.dataplex.v1.Session> sessions_; /** * * * <pre> * Sessions under a given environment. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.dataplex.v1.Session> getSessionsList() { return sessions_; } /** * * * <pre> * Sessions under a given environment. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.dataplex.v1.SessionOrBuilder> getSessionsOrBuilderList() { return sessions_; } /** * * * <pre> * Sessions under a given environment. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code> */ @java.lang.Override public int getSessionsCount() { return sessions_.size(); } /** * * * <pre> * Sessions under a given environment. 
* </pre> * * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.Session getSessions(int index) { return sessions_.get(index); } /** * * * <pre> * Sessions under a given environment. * </pre> * * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code> */ @java.lang.Override public com.google.cloud.dataplex.v1.SessionOrBuilder getSessionsOrBuilder(int index) { return sessions_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results in the list. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < sessions_.size(); i++) { output.writeMessage(1, sessions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < sessions_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sessions_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.ListSessionsResponse)) { return super.equals(obj); } com.google.cloud.dataplex.v1.ListSessionsResponse other = (com.google.cloud.dataplex.v1.ListSessionsResponse) obj; if (!getSessionsList().equals(other.getSessionsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSessionsCount() > 0) { hash = (37 * hash) + SESSIONS_FIELD_NUMBER; hash = (53 * hash) + getSessionsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
// ---------------------------------------------------------------------------
// Generated protobuf plumbing for ListSessionsResponse (protoc output).
// NOTE(review): machine-generated — do not hand-edit; regenerate from the
// google/cloud/dataplex/v1 service proto instead. The enclosing class
// declaration is above this chunk; the first method below is the tail of the
// InputStream parseFrom overload (its modifiers are on an earlier line).
// ---------------------------------------------------------------------------
com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

// Parses a message from an InputStream, honoring the given extension registry.
public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Parses a length-delimited message (varint length prefix) from the stream.
public static com.google.cloud.dataplex.v1.ListSessionsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.dataplex.v1.ListSessionsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dataplex.v1.ListSessionsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.dataplex.v1.ListSessionsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Default instance yields a fresh builder; any other instance seeds the
  // builder with its current field values.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * List sessions response.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataplex.v1.ListSessionsResponse}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.ListSessionsResponse)
    com.google.cloud.dataplex.v1.ListSessionsResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataplex.v1.ServiceProto
        .internal_static_google_cloud_dataplex_v1_ListSessionsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataplex.v1.ServiceProto
        .internal_static_google_cloud_dataplex_v1_ListSessionsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataplex.v1.ListSessionsResponse.class,
            com.google.cloud.dataplex.v1.ListSessionsResponse.Builder.class);
  }

  // Construct using com.google.cloud.dataplex.v1.ListSessionsResponse.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its default; drops any repeated-field builder state.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (sessionsBuilder_ == null) {
      sessions_ = java.util.Collections.emptyList();
    } else {
      sessions_ = null;
      sessionsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.dataplex.v1.ServiceProto
        .internal_static_google_cloud_dataplex_v1_ListSessionsResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.ListSessionsResponse getDefaultInstanceForType() {
    return com.google.cloud.dataplex.v1.ListSessionsResponse.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.ListSessionsResponse build() {
    com.google.cloud.dataplex.v1.ListSessionsResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.ListSessionsResponse buildPartial() {
    com.google.cloud.dataplex.v1.ListSessionsResponse result =
        new com.google.cloud.dataplex.v1.ListSessionsResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Transfers the repeated sessions field into the result, freezing the list
  // as unmodifiable when it was built without a field builder.
  private void buildPartialRepeatedFields(
      com.google.cloud.dataplex.v1.ListSessionsResponse result) {
    if (sessionsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        sessions_ = java.util.Collections.unmodifiableList(sessions_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.sessions_ = sessions_;
    } else {
      result.sessions_ = sessionsBuilder_.build();
    }
  }

  // Transfers scalar fields guarded by bitField0_ into the result.
  private void buildPartial0(com.google.cloud.dataplex.v1.ListSessionsResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.nextPageToken_ = nextPageToken_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.dataplex.v1.ListSessionsResponse) {
      return mergeFrom((com.google.cloud.dataplex.v1.ListSessionsResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Merges another ListSessionsResponse: appends its sessions and, when
  // non-empty, takes over its next_page_token.
  public Builder mergeFrom(com.google.cloud.dataplex.v1.ListSessionsResponse other) {
    if (other == com.google.cloud.dataplex.v1.ListSessionsResponse.getDefaultInstance())
      return this;
    if (sessionsBuilder_ == null) {
      if (!other.sessions_.isEmpty()) {
        if (sessions_.isEmpty()) {
          // Our list is empty: share the other message's (immutable) list.
          sessions_ = other.sessions_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureSessionsIsMutable();
          sessions_.addAll(other.sessions_);
        }
        onChanged();
      }
    } else {
      if (!other.sessions_.isEmpty()) {
        if (sessionsBuilder_.isEmpty()) {
          sessionsBuilder_.dispose();
          sessionsBuilder_ = null;
          sessions_ = other.sessions_;
          bitField0_ = (bitField0_ & ~0x00000001);
          sessionsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getSessionsFieldBuilder()
                  : null;
        } else {
          sessionsBuilder_.addAllMessages(other.sessions_);
        }
      }
    }
    if (!other.getNextPageToken().isEmpty()) {
      nextPageToken_ = other.nextPageToken_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format parse loop: field 1 (tag 10) = repeated Session messages,
  // field 2 (tag 18) = next_page_token string; unknown fields preserved.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.cloud.dataplex.v1.Session m =
                  input.readMessage(
                      com.google.cloud.dataplex.v1.Session.parser(), extensionRegistry);
              if (sessionsBuilder_ == null) {
                ensureSessionsIsMutable();
                sessions_.add(m);
              } else {
                sessionsBuilder_.addMessage(m);
              }
              break;
            } // case 10
          case 18:
            {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // Repeated sessions field; bit 0x00000001 tracks whether the list is mutable.
  private java.util.List<com.google.cloud.dataplex.v1.Session> sessions_ =
      java.util.Collections.emptyList();

  private void ensureSessionsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      sessions_ = new java.util.ArrayList<com.google.cloud.dataplex.v1.Session>(sessions_);
      bitField0_ |= 0x00000001;
    }
  }

  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.dataplex.v1.Session,
          com.google.cloud.dataplex.v1.Session.Builder,
          com.google.cloud.dataplex.v1.SessionOrBuilder>
      sessionsBuilder_;

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public java.util.List<com.google.cloud.dataplex.v1.Session> getSessionsList() {
    if (sessionsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(sessions_);
    } else {
      return sessionsBuilder_.getMessageList();
    }
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public int getSessionsCount() {
    if (sessionsBuilder_ == null) {
      return sessions_.size();
    } else {
      return sessionsBuilder_.getCount();
    }
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.dataplex.v1.Session getSessions(int index) {
    if (sessionsBuilder_ == null) {
      return sessions_.get(index);
    } else {
      return sessionsBuilder_.getMessage(index);
    }
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder setSessions(int index, com.google.cloud.dataplex.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.set(index, value);
      onChanged();
    } else {
      sessionsBuilder_.setMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder setSessions(
      int index, com.google.cloud.dataplex.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.set(index, builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(com.google.cloud.dataplex.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.add(value);
      onChanged();
    } else {
      sessionsBuilder_.addMessage(value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(int index, com.google.cloud.dataplex.v1.Session value) {
    if (sessionsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureSessionsIsMutable();
      sessions_.add(index, value);
      onChanged();
    } else {
      sessionsBuilder_.addMessage(index, value);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(com.google.cloud.dataplex.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.add(builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder addSessions(
      int index, com.google.cloud.dataplex.v1.Session.Builder builderForValue) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.add(index, builderForValue.build());
      onChanged();
    } else {
      sessionsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder addAllSessions(
      java.lang.Iterable<? extends com.google.cloud.dataplex.v1.Session> values) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sessions_);
      onChanged();
    } else {
      sessionsBuilder_.addAllMessages(values);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder clearSessions() {
    if (sessionsBuilder_ == null) {
      sessions_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      sessionsBuilder_.clear();
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public Builder removeSessions(int index) {
    if (sessionsBuilder_ == null) {
      ensureSessionsIsMutable();
      sessions_.remove(index);
      onChanged();
    } else {
      sessionsBuilder_.remove(index);
    }
    return this;
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.dataplex.v1.Session.Builder getSessionsBuilder(int index) {
    return getSessionsFieldBuilder().getBuilder(index);
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.dataplex.v1.SessionOrBuilder getSessionsOrBuilder(int index) {
    if (sessionsBuilder_ == null) {
      return sessions_.get(index);
    } else {
      return sessionsBuilder_.getMessageOrBuilder(index);
    }
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public java.util.List<? extends com.google.cloud.dataplex.v1.SessionOrBuilder>
      getSessionsOrBuilderList() {
    if (sessionsBuilder_ != null) {
      return sessionsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(sessions_);
    }
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.dataplex.v1.Session.Builder addSessionsBuilder() {
    return getSessionsFieldBuilder()
        .addBuilder(com.google.cloud.dataplex.v1.Session.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public com.google.cloud.dataplex.v1.Session.Builder addSessionsBuilder(int index) {
    return getSessionsFieldBuilder()
        .addBuilder(index, com.google.cloud.dataplex.v1.Session.getDefaultInstance());
  }

  /**
   *
   *
   * <pre>
   * Sessions under a given environment.
   * </pre>
   *
   * <code>repeated .google.cloud.dataplex.v1.Session sessions = 1;</code>
   */
  public java.util.List<com.google.cloud.dataplex.v1.Session.Builder> getSessionsBuilderList() {
    return getSessionsFieldBuilder().getBuilderList();
  }

  // Lazily creates the RepeatedFieldBuilderV3 and hands ownership of the
  // current list to it (sessions_ is nulled out afterwards).
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.dataplex.v1.Session,
          com.google.cloud.dataplex.v1.Session.Builder,
          com.google.cloud.dataplex.v1.SessionOrBuilder>
      getSessionsFieldBuilder() {
    if (sessionsBuilder_ == null) {
      sessionsBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.dataplex.v1.Session,
              com.google.cloud.dataplex.v1.Session.Builder,
              com.google.cloud.dataplex.v1.SessionOrBuilder>(
              sessions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      sessions_ = null;
    }
    return sessionsBuilder_;
  }

  // Stored as String or ByteString; see the getter for lazy conversion.
  private java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageToken(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearNextPageToken() {
    nextPageToken_ = getDefaultInstance().getNextPageToken();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @param value The bytes for nextPageToken to set.
   * @return This builder for chaining.
   */
  public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    nextPageToken_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.ListSessionsResponse)
}

// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.ListSessionsResponse)
private static final com.google.cloud.dataplex.v1.ListSessionsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.ListSessionsResponse();
}

public static com.google.cloud.dataplex.v1.ListSessionsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that delegates to the builder's mergeFrom; partial messages are
// attached to thrown InvalidProtocolBufferExceptions.
private static final com.google.protobuf.Parser<ListSessionsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListSessionsResponse>() {
      @java.lang.Override
      public ListSessionsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListSessionsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListSessionsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataplex.v1.ListSessionsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ---------------------------------------------------------------------------
// NOTE(review): concatenation artifact — the three lines below were stray
// dataset metadata, not Java. Preserved here as comments. Everything that
// follows belongs to a DIFFERENT generated file:
//   repo: googleapis/google-cloud-java
//   size: 35,338
//   path: java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/UriSettings.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/accounts/v1/checkoutsettings.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.accounts.v1; /** * * * <pre> * URL settings for cart or checkout URL. * </pre> * * Protobuf type {@code google.shopping.merchant.accounts.v1.UriSettings} */ public final class UriSettings extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.UriSettings) UriSettingsOrBuilder { private static final long serialVersionUID = 0L; // Use UriSettings.newBuilder() to construct. 
// ---------------------------------------------------------------------------
// Generated protobuf message body for UriSettings (protoc output).
// NOTE(review): machine-generated — do not hand-edit; regenerate from
// google/shopping/merchant/accounts/v1/checkoutsettings.proto instead.
// The class declaration sits on an earlier line; the nested Builder continues
// past the end of this chunk.
// ---------------------------------------------------------------------------
private UriSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private UriSettings() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UriSettings();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto
      .internal_static_google_shopping_merchant_accounts_v1_UriSettings_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto
      .internal_static_google_shopping_merchant_accounts_v1_UriSettings_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.merchant.accounts.v1.UriSettings.class,
          com.google.shopping.merchant.accounts.v1.UriSettings.Builder.class);
}

// Oneof discriminator for the uri_template oneof (0 = not set).
private int uriTemplateCase_ = 0;

// Holds the active oneof value as either a String or a ByteString.
@SuppressWarnings("serial")
private java.lang.Object uriTemplate_;

public enum UriTemplateCase
    implements
        com.google.protobuf.Internal.EnumLite,
        com.google.protobuf.AbstractMessage.InternalOneOfEnum {
  CHECKOUT_URI_TEMPLATE(1),
  CART_URI_TEMPLATE(2),
  URITEMPLATE_NOT_SET(0);

  private final int value;

  private UriTemplateCase(int value) {
    this.value = value;
  }

  /**
   * @param value The number of the enum to look for.
   * @return The enum associated with the given number.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static UriTemplateCase valueOf(int value) {
    return forNumber(value);
  }

  public static UriTemplateCase forNumber(int value) {
    switch (value) {
      case 1:
        return CHECKOUT_URI_TEMPLATE;
      case 2:
        return CART_URI_TEMPLATE;
      case 0:
        return URITEMPLATE_NOT_SET;
      default:
        // Unknown oneof field number.
        return null;
    }
  }

  public int getNumber() {
    return this.value;
  }
};

public UriTemplateCase getUriTemplateCase() {
  return UriTemplateCase.forNumber(uriTemplateCase_);
}

public static final int CHECKOUT_URI_TEMPLATE_FIELD_NUMBER = 1;

/**
 *
 *
 * <pre>
 * Checkout URL template. When the placeholders are expanded will redirect
 * the buyer to the merchant checkout page with the item in the cart. For
 * more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string checkout_uri_template = 1;</code>
 *
 * @return Whether the checkoutUriTemplate field is set.
 */
public boolean hasCheckoutUriTemplate() {
  return uriTemplateCase_ == 1;
}

/**
 *
 *
 * <pre>
 * Checkout URL template. When the placeholders are expanded will redirect
 * the buyer to the merchant checkout page with the item in the cart. For
 * more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string checkout_uri_template = 1;</code>
 *
 * @return The checkoutUriTemplate.
 */
public java.lang.String getCheckoutUriTemplate() {
  java.lang.Object ref = "";
  if (uriTemplateCase_ == 1) {
    ref = uriTemplate_;
  }
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the ByteString and cache the String back into the oneof.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (uriTemplateCase_ == 1) {
      uriTemplate_ = s;
    }
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Checkout URL template. When the placeholders are expanded will redirect
 * the buyer to the merchant checkout page with the item in the cart. For
 * more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string checkout_uri_template = 1;</code>
 *
 * @return The bytes for checkoutUriTemplate.
 */
public com.google.protobuf.ByteString getCheckoutUriTemplateBytes() {
  java.lang.Object ref = "";
  if (uriTemplateCase_ == 1) {
    ref = uriTemplate_;
  }
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (uriTemplateCase_ == 1) {
      uriTemplate_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int CART_URI_TEMPLATE_FIELD_NUMBER = 2;

/**
 *
 *
 * <pre>
 * Cart URL template. When the placeholders are expanded will redirect the
 * buyer to the cart page on the merchant website with the selected
 * item in cart. For more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string cart_uri_template = 2;</code>
 *
 * @return Whether the cartUriTemplate field is set.
 */
public boolean hasCartUriTemplate() {
  return uriTemplateCase_ == 2;
}

/**
 *
 *
 * <pre>
 * Cart URL template. When the placeholders are expanded will redirect the
 * buyer to the cart page on the merchant website with the selected
 * item in cart. For more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string cart_uri_template = 2;</code>
 *
 * @return The cartUriTemplate.
 */
public java.lang.String getCartUriTemplate() {
  java.lang.Object ref = "";
  if (uriTemplateCase_ == 2) {
    ref = uriTemplate_;
  }
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Lazily decode the ByteString and cache the String back into the oneof.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (uriTemplateCase_ == 2) {
      uriTemplate_ = s;
    }
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Cart URL template. When the placeholders are expanded will redirect the
 * buyer to the cart page on the merchant website with the selected
 * item in cart. For more details, check the [help center
 * doc](https://support.google.com/merchants/answer/13945960#method1&amp;zippy=%2Cproduct-level-url-formatting%2Caccount-level-url-formatting)
 * </pre>
 *
 * <code>string cart_uri_template = 2;</code>
 *
 * @return The bytes for cartUriTemplate.
 */
public com.google.protobuf.ByteString getCartUriTemplateBytes() {
  java.lang.Object ref = "";
  if (uriTemplateCase_ == 2) {
    ref = uriTemplate_;
  }
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (uriTemplateCase_ == 2) {
      uriTemplate_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

// Memoized isInitialized result: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

// Serializes whichever oneof variant is set, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (uriTemplateCase_ == 1) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, uriTemplate_);
  }
  if (uriTemplateCase_ == 2) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uriTemplate_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (uriTemplateCase_ == 1) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, uriTemplate_);
  }
  if (uriTemplateCase_ == 2) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uriTemplate_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.shopping.merchant.accounts.v1.UriSettings)) {
    return super.equals(obj);
  }
  com.google.shopping.merchant.accounts.v1.UriSettings other =
      (com.google.shopping.merchant.accounts.v1.UriSettings) obj;

  if (!getUriTemplateCase().equals(other.getUriTemplateCase())) return false;
  switch (uriTemplateCase_) {
    case 1:
      if (!getCheckoutUriTemplate().equals(other.getCheckoutUriTemplate())) return false;
      break;
    case 2:
      if (!getCartUriTemplate().equals(other.getCartUriTemplate())) return false;
      break;
    case 0:
    default:
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  switch (uriTemplateCase_) {
    case 1:
      hash = (37 * hash) + CHECKOUT_URI_TEMPLATE_FIELD_NUMBER;
      hash = (53 * hash) + getCheckoutUriTemplate().hashCode();
      break;
    case 2:
      hash = (37 * hash) + CART_URI_TEMPLATE_FIELD_NUMBER;
      hash = (53 * hash) + getCartUriTemplate().hashCode();
      break;
    case 0:
    default:
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard generated parse entry points for every supported input form.
public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.UriSettings parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.shopping.merchant.accounts.v1.UriSettings prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Default instance yields a fresh builder; any other instance seeds the
  // builder with its current field values.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * URL settings for cart or checkout URL.
* </pre>
   *
   * Protobuf type {@code google.shopping.merchant.accounts.v1.UriSettings}
   */
  // Generated protobuf Builder for UriSettings. The message holds a single oneof
  // ("uri_template") with two string alternatives: checkout_uri_template (field 1)
  // and cart_uri_template (field 2). The oneof state lives in uriTemplateCase_
  // (0 = unset, 1 = checkout, 2 = cart) plus uriTemplate_, which holds either a
  // String or a ByteString that is lazily converted on access.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.UriSettings)
      com.google.shopping.merchant.accounts.v1.UriSettingsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1_UriSettings_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1_UriSettings_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1.UriSettings.class,
              com.google.shopping.merchant.accounts.v1.UriSettings.Builder.class);
    }

    // Construct using com.google.shopping.merchant.accounts.v1.UriSettings.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets all builder state, including the uri_template oneof, to the unset state.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      uriTemplateCase_ = 0;
      uriTemplate_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto
          .internal_static_google_shopping_merchant_accounts_v1_UriSettings_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UriSettings getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1.UriSettings.getDefaultInstance();
    }

    // Builds the message, throwing if it is uninitialized (this message has no
    // required fields, so isInitialized() is always true in practice).
    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UriSettings build() {
      com.google.shopping.merchant.accounts.v1.UriSettings result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UriSettings buildPartial() {
      com.google.shopping.merchant.accounts.v1.UriSettings result =
          new com.google.shopping.merchant.accounts.v1.UriSettings(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // No singular non-oneof fields in this message; generated hook only snapshots
    // the bit field.
    private void buildPartial0(com.google.shopping.merchant.accounts.v1.UriSettings result) {
      int from_bitField0_ = bitField0_;
    }

    // Copies the oneof discriminator and its value into the built message.
    private void buildPartialOneofs(com.google.shopping.merchant.accounts.v1.UriSettings result) {
      result.uriTemplateCase_ = uriTemplateCase_;
      result.uriTemplate_ = this.uriTemplate_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic merge entry point: dispatches to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.accounts.v1.UriSettings) {
        return mergeFrom((com.google.shopping.merchant.accounts.v1.UriSettings) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: adopts whichever oneof alternative is set in 'other'
    // (last-writer-wins semantics for oneofs).
    public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.UriSettings other) {
      if (other == com.google.shopping.merchant.accounts.v1.UriSettings.getDefaultInstance())
        return this;
      switch (other.getUriTemplateCase()) {
        case CHECKOUT_URI_TEMPLATE:
          {
            uriTemplateCase_ = 1;
            uriTemplate_ = other.uriTemplate_;
            onChanged();
            break;
          }
        case CART_URI_TEMPLATE:
          {
            uriTemplateCase_ = 2;
            uriTemplate_ = other.uriTemplate_;
            onChanged();
            break;
          }
        case URITEMPLATE_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format merge: tag 10 = field 1 (checkout_uri_template, length-delimited),
    // tag 18 = field 2 (cart_uri_template); anything else goes to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                java.lang.String s = input.readStringRequireUtf8();
                uriTemplateCase_ = 1;
                uriTemplate_ = s;
                break;
              } // case 10
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                uriTemplateCase_ = 2;
                uriTemplate_ = s;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Oneof discriminator: 0 = unset, 1 = checkout_uri_template, 2 = cart_uri_template.
    private int uriTemplateCase_ = 0;
    // Holds either a java.lang.String or a ByteString for the active oneof member.
    private java.lang.Object uriTemplate_;

    public UriTemplateCase getUriTemplateCase() {
      return UriTemplateCase.forNumber(uriTemplateCase_);
    }

    public Builder clearUriTemplate() {
      uriTemplateCase_ = 0;
      uriTemplate_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    /**
     * Checkout URL template. When the placeholders are expanded, redirects the buyer
     * to the merchant checkout page with the item in the cart.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return Whether the checkoutUriTemplate field is set.
     */
    @java.lang.Override
    public boolean hasCheckoutUriTemplate() {
      return uriTemplateCase_ == 1;
    }

    /**
     * Returns the checkout URL template, or "" when the oneof holds a different case.
     * Lazily converts a cached ByteString to String and re-caches the String form.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return The checkoutUriTemplate.
     */
    @java.lang.Override
    public java.lang.String getCheckoutUriTemplate() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 1) {
        ref = uriTemplate_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (uriTemplateCase_ == 1) {
          uriTemplate_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Returns the checkout URL template as UTF-8 bytes (empty when unset),
     * caching the ByteString form.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return The bytes for checkoutUriTemplate.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getCheckoutUriTemplateBytes() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 1) {
        ref = uriTemplate_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (uriTemplateCase_ == 1) {
          uriTemplate_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the checkout URL template and switches the oneof to this case.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @param value The checkoutUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCheckoutUriTemplate(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      uriTemplateCase_ = 1;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     * Clears the oneof only if it currently holds the checkout case.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCheckoutUriTemplate() {
      if (uriTemplateCase_ == 1) {
        uriTemplateCase_ = 0;
        uriTemplate_ = null;
        onChanged();
      }
      return this;
    }

    /**
     * Sets the checkout URL template from validated UTF-8 bytes and switches the
     * oneof to this case.
     *
     * <code>string checkout_uri_template = 1;</code>
     *
     * @param value The bytes for checkoutUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCheckoutUriTemplateBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uriTemplateCase_ = 1;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     * Cart URL template. When the placeholders are expanded, redirects the buyer to
     * the cart page on the merchant website with the selected item in cart.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return Whether the cartUriTemplate field is set.
     */
    @java.lang.Override
    public boolean hasCartUriTemplate() {
      return uriTemplateCase_ == 2;
    }

    /**
     * Returns the cart URL template, or "" when the oneof holds a different case.
     * Lazily converts a cached ByteString to String and re-caches the String form.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return The cartUriTemplate.
     */
    @java.lang.Override
    public java.lang.String getCartUriTemplate() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 2) {
        ref = uriTemplate_;
      }
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (uriTemplateCase_ == 2) {
          uriTemplate_ = s;
        }
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * Returns the cart URL template as UTF-8 bytes (empty when unset), caching the
     * ByteString form.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return The bytes for cartUriTemplate.
     */
    @java.lang.Override
    public com.google.protobuf.ByteString getCartUriTemplateBytes() {
      java.lang.Object ref = "";
      if (uriTemplateCase_ == 2) {
        ref = uriTemplate_;
      }
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        if (uriTemplateCase_ == 2) {
          uriTemplate_ = b;
        }
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * Sets the cart URL template and switches the oneof to this case.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @param value The cartUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCartUriTemplate(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      uriTemplateCase_ = 2;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    /**
     * Clears the oneof only if it currently holds the cart case.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCartUriTemplate() {
      if (uriTemplateCase_ == 2) {
        uriTemplateCase_ = 0;
        uriTemplate_ = null;
        onChanged();
      }
      return this;
    }

    /**
     * Sets the cart URL template from validated UTF-8 bytes and switches the oneof
     * to this case.
     *
     * <code>string cart_uri_template = 2;</code>
     *
     * @param value The bytes for cartUriTemplate to set.
     * @return This builder for chaining.
     */
    public Builder setCartUriTemplateBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      uriTemplateCase_ = 2;
      uriTemplate_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.UriSettings)
  }

  // @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.UriSettings)
  private static final com.google.shopping.merchant.accounts.v1.UriSettings DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.UriSettings();
  }

  public static com.google.shopping.merchant.accounts.v1.UriSettings getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that delegates to Builder.mergeFrom and preserves partially-parsed
  // messages on failure via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<UriSettings> PARSER =
      new com.google.protobuf.AbstractParser<UriSettings>() {
        @java.lang.Override
        public UriSettings parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UriSettings> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UriSettings> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.UriSettings getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
apache/impala
35,627
fe/src/main/java/org/apache/impala/planner/KuduScanNode.java
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.impala.planner; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.ListIterator; import java.util.Map; import java.util.Set; import org.apache.impala.analysis.Analyzer; import org.apache.impala.analysis.BinaryPredicate; import org.apache.impala.analysis.BoolLiteral; import org.apache.impala.analysis.DateLiteral; import org.apache.impala.analysis.Expr; import org.apache.impala.analysis.InPredicate; import org.apache.impala.analysis.IsNullPredicate; import org.apache.impala.analysis.LiteralExpr; import org.apache.impala.analysis.MultiAggregateInfo; import org.apache.impala.analysis.NumericLiteral; import org.apache.impala.analysis.SlotDescriptor; import org.apache.impala.analysis.SlotRef; import org.apache.impala.analysis.StringLiteral; import org.apache.impala.analysis.TableRef; import org.apache.impala.analysis.TupleDescriptor; import org.apache.impala.catalog.FeKuduTable; import org.apache.impala.catalog.KuduColumn; import org.apache.impala.catalog.Type; import org.apache.impala.common.AnalysisException; import org.apache.impala.common.ImpalaRuntimeException; import 
org.apache.impala.common.InternalException; import org.apache.impala.service.BackendConfig; import org.apache.impala.thrift.TExplainLevel; import org.apache.impala.thrift.TKuduReplicaSelection; import org.apache.impala.thrift.TKuduScanNode; import org.apache.impala.thrift.TNetworkAddress; import org.apache.impala.thrift.TPlanNode; import org.apache.impala.thrift.TPlanNodeType; import org.apache.impala.thrift.TQueryOptions; import org.apache.impala.thrift.TScanRange; import org.apache.impala.thrift.TScanRangeLocation; import org.apache.impala.thrift.TScanRangeLocationList; import org.apache.impala.thrift.TScanRangeSpec; import org.apache.impala.util.ExprUtil; import org.apache.impala.util.KuduUtil; import org.apache.impala.util.ExecutorMembershipSnapshot; import org.apache.kudu.ColumnSchema; import org.apache.kudu.Schema; import org.apache.kudu.client.KuduClient; import org.apache.kudu.client.KuduPredicate; import org.apache.kudu.client.KuduPredicate.ComparisonOp; import org.apache.kudu.client.KuduScanToken; import org.apache.kudu.client.KuduScanToken.KuduScanTokenBuilder; import org.apache.kudu.client.LocatedTablet; import org.apache.kudu.consensus.Metadata.RaftPeerPB.Role; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Charsets; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; /** * Scan of a single Kudu table. * * Extracts predicates that can be pushed down to Kudu. Currently only binary predicates * that have a constant expression on one side and a slot ref on the other can be * evaluated by Kudu. * * Uses the Kudu ScanToken API to generate a set of Kudu "scan tokens" which are used for * scheduling and initializing the scanners. 
Scan tokens are opaque objects that represent
 * a scan for some Kudu data on a tablet (currently one token represents one tablet), and
 * it contains the tablet locations and all information needed to produce a Kudu scanner,
 * including the projected columns and predicates that are pushed down.
 *
 * After KUDU-1065 is resolved, Kudu will also prune the tablets that don't need to be
 * scanned, and only the tokens for those tablets will be returned.
 */
public class KuduScanNode extends ScanNode {
  private final static Logger LOG = LoggerFactory.getLogger(KuduScanNode.class);

  // The Kudu table scanned by this node; taken from the tuple descriptor.
  private final FeKuduTable kuduTable_;

  // True if this scan node should use the MT implementation in the backend.
  // Set in computeNodeResourceProfile().
  private boolean useMtScanNode_;

  // True if the query option of replica selection is set as leader-only.
  private boolean replicaSelectionLeaderOnly_ = false;

  // Indexes for the set of hosts that will be used for the query.
  // From analyzer.getHostIndex().getIndex(address)
  private final Set<Integer> hostIndexSet_ = new HashSet<>();

  // List of conjuncts that can be pushed down to Kudu. Used for computing stats and
  // explain strings.
  private final List<Expr> kuduConjuncts_ = new ArrayList<>();

  // Exprs in kuduConjuncts_ converted to KuduPredicates.
  private final List<KuduPredicate> kuduPredicates_ = new ArrayList<>();

  // Slot that is used to record the Kudu metadata for the count(*) aggregation if
  // this scan node has the count(*) optimization enabled.
  private SlotDescriptor countStarSlot_ = null;

  // True if the input query returns at most one row from Kudu. Set in
  // extractKuduConjuncts() when the number of primary key columns in equivalence
  // predicates pushed down to Kudu equals the total number of primary key columns
  // of the Kudu table. Used to adjust the cardinality estimation so that point
  // lookups on Kudu primary keys benefit from small query optimization.
  boolean isPointLookupQuery_ = false;

  // Indicates the current Kudu predicate should not be removed from conjuncts.
  // If the current predicate is a comparison predicate with an ambiguous timestamp, it
  // may need to be checked again after actually scanning. For example, given two rows
  // with column 'ts' 01:40:00 (local, UTC 05:40:00) and 01:20:00 (local, UTC 06:20:00),
  // and the predicate 'ts' < 01:30:00 (local; converts to UTC 05:30:00 or 06:30:00),
  // we should push ts < 06:30:00 (UTC) to Kudu to avoid missing the row with ts
  // 01:20:00 (local, UTC 06:20:00), and then also filter out the row with ts
  // 01:40:00 (local, UTC 05:40:00) after actually scanning.
  boolean currentPredicateNeedCheckAgain_ = false;

  /**
   * Creates a Kudu scan node over 'desc' with the given conjuncts; also records the
   * table's row-count hint from the table ref for use when stats are missing.
   */
  public KuduScanNode(PlanNodeId id, TupleDescriptor desc, List<Expr> conjuncts,
      MultiAggregateInfo aggInfo, TableRef kuduTblRef) {
    super(id, desc, "SCAN KUDU");
    kuduTable_ = (FeKuduTable) desc_.getTable();
    conjuncts_ = conjuncts;
    aggInfo_ = aggInfo;
    tableNumRowsHint_ = kuduTblRef.getTableNumRowsHint();
  }

  /**
   * Validates the Kudu schema, extracts pushable predicates, computes the memory
   * layout, creates scan tokens / scan range locations, and computes stats.
   * Any failure is wrapped in an ImpalaRuntimeException.
   */
  @Override
  public void init(Analyzer analyzer) throws ImpalaRuntimeException {
    conjuncts_ = orderConjunctsByCost(conjuncts_);
    KuduClient client = KuduUtil.getKuduClient(kuduTable_.getKuduMasterHosts());
    try {
      // Get the KuduTable from the analyzer to retrieve the cached KuduTable
      // for this query and prevent multiple openTable calls for a single query.
      org.apache.kudu.client.KuduTable rpcTable = analyzer.getKuduTable(kuduTable_);
      validateSchema(rpcTable);

      if (canApplyCountStarOptimization(analyzer)) {
        Preconditions.checkState(desc_.getPath().destTable() != null);
        Preconditions.checkState(kuduConjuncts_.isEmpty());
        countStarSlot_ = applyCountStarOptimization(analyzer);
      }

      // Extract predicates that can be evaluated by Kudu.
      extractKuduConjuncts(analyzer, client, rpcTable);

      // Materialize the slots of the remaining conjuncts (i.e. those not pushed to Kudu)
      analyzer.materializeSlots(conjuncts_);

      // Compute mem layout before the scan range locations because creation of the Kudu
      // scan tokens depends on having a mem layout.
      computeMemLayout(analyzer);

      // Creates Kudu scan tokens and sets the scan range locations.
      computeScanRangeLocations(analyzer, client, rpcTable);
    } catch (Exception e) {
      throw new ImpalaRuntimeException("Unable to initialize the Kudu scan node", e);
    }
    computeStats(analyzer);
  }

  /**
   * Validate the columns Impala expects are actually in the Kudu table.
   * Checks existence, type, and nullability of every scan slot's column; a mismatch
   * signals stale Impala metadata and raises ImpalaRuntimeException.
   */
  private void validateSchema(org.apache.kudu.client.KuduTable rpcTable)
      throws ImpalaRuntimeException {
    Schema tableSchema = rpcTable.getSchema();
    for (SlotDescriptor desc: getTupleDesc().getSlots()) {
      if (!desc.isScanSlot()) continue;
      String colName = ((KuduColumn) desc.getColumn()).getKuduName();
      Type colType = desc.getColumn().getType();
      ColumnSchema kuduCol = null;
      try {
        kuduCol = tableSchema.getColumn(colName);
      } catch (Exception e) {
        throw new ImpalaRuntimeException("Column '" + colName + "' not found in kudu " +
            "table " + rpcTable.getName() + ". The table metadata in Impala may be " +
            "outdated and need to be refreshed.");
      }
      Type kuduColType =
          KuduUtil.toImpalaType(kuduCol.getType(), kuduCol.getTypeAttributes());
      if (!colType.equals(kuduColType)) {
        throw new ImpalaRuntimeException("Column '" + colName + "' is type " +
            kuduColType.toSql() + " but Impala expected " + colType.toSql() +
            ". The table metadata in Impala may be outdated and need to be refreshed.");
      }
      if (desc.getIsNullable() != kuduCol.isNullable()) {
        String expected;
        String actual;
        if (desc.getIsNullable()) {
          expected = "nullable";
          actual = "not nullable";
        } else {
          expected = "not nullable";
          actual = "nullable";
        }
        throw new ImpalaRuntimeException("Column '" + colName + "' is " + actual +
            " but Impala expected it to be " + expected +
            ". The table metadata in Impala may be outdated and need to be refreshed.");
      }
    }
  }

  /**
   * Compute the scan range locations for the given table using the scan tokens.
   * One TScanRangeLocationList (with a serialized scan token) is produced per tablet;
   * replicas are filtered to leaders only when KUDU_REPLICA_SELECTION=LEADER_ONLY.
   */
  private void computeScanRangeLocations(Analyzer analyzer, KuduClient client,
      org.apache.kudu.client.KuduTable rpcTable) throws ImpalaRuntimeException {
    scanRangeSpecs_ = new TScanRangeSpec();
    replicaSelectionLeaderOnly_ =
        (analyzer.getQueryOptions().getKudu_replica_selection()
            == TKuduReplicaSelection.LEADER_ONLY);
    List<KuduScanToken> scanTokens = createScanTokens(analyzer, client, rpcTable);
    for (KuduScanToken token: scanTokens) {
      LocatedTablet tablet = token.getTablet();
      List<TScanRangeLocation> locations = new ArrayList<>();
      if (tablet.getReplicas().isEmpty()) {
        throw new ImpalaRuntimeException(String.format(
            "At least one tablet does not have any replicas. Tablet ID: %s",
            new String(tablet.getTabletId(), Charsets.UTF_8)));
      }
      for (LocatedTablet.Replica replica: tablet.getReplicas()) {
        // Skip non-leader replicas if query option KUDU_REPLICA_SELECTION is set as
        // LEADER_ONLY.
        if (replicaSelectionLeaderOnly_
            && !replica.getRole().equals(Role.LEADER.toString())) {
          continue;
        }
        TNetworkAddress address =
            new TNetworkAddress(replica.getRpcHost(), replica.getRpcPort());
        // Use the network address to look up the host in the global list
        Integer hostIndex = analyzer.getHostIndex().getOrAddIndex(address);
        locations.add(new TScanRangeLocation(hostIndex));
        hostIndexSet_.add(hostIndex);
      }
      TScanRange scanRange = new TScanRange();
      try {
        scanRange.setKudu_scan_token(token.serialize());
      } catch (IOException e) {
        throw new ImpalaRuntimeException("Unable to serialize Kudu scan token=" +
            token.toString(), e);
      }
      TScanRangeLocationList locs = new TScanRangeLocationList();
      locs.setScan_range(scanRange);
      locs.setLocations(locations);
      scanRangeSpecs_.addToConcrete_ranges(locs);
    }
  }

  /**
   * Returns KuduScanTokens for this scan given the projected columns and predicates that
   * will be pushed to Kudu.
The projected Kudu columns are ordered by offset in an
   * Impala tuple to make the Impala and Kudu tuple layouts identical.
   */
  private List<KuduScanToken> createScanTokens(Analyzer analyzer, KuduClient client,
      org.apache.kudu.client.KuduTable rpcTable) {
    List<String> projectedCols = new ArrayList<>();
    // Ordered by slot offset so the Kudu row layout matches the Impala tuple layout.
    for (SlotDescriptor desc: getTupleDesc().getSlotsOrderedByOffset()) {
      if (!isCountStarOptimizationDescriptor(desc)) {
        projectedCols.add(((KuduColumn) desc.getColumn()).getKuduName());
      }
    }
    KuduScanTokenBuilder tokenBuilder = client.newScanTokenBuilder(rpcTable);
    tokenBuilder.setProjectedColumnNames(projectedCols);
    // Honor TARGETED_KUDU_SCAN_RANGE_LENGTH when set (> 0) to split tablets into
    // smaller scan tokens.
    long split_size_hint = analyzer.getQueryOptions()
        .getTargeted_kudu_scan_range_length();
    if (split_size_hint > 0) tokenBuilder.setSplitSizeBytes(split_size_hint);
    for (KuduPredicate predicate: kuduPredicates_) tokenBuilder.addPredicate(predicate);
    return tokenBuilder.build();
  }

  /**
   * Combined selectivity over both the remaining conjuncts and the ones pushed
   * down to Kudu.
   */
  @Override
  protected double computeSelectivity() {
    List<Expr> allConjuncts = Lists.newArrayList(
        Iterables.concat(conjuncts_, kuduConjuncts_));
    return computeCombinedSelectivity(allConjuncts);
  }

  /**
   * Estimate the number of impalad nodes that this scan node will execute on (which is
   * ultimately determined by the scheduling done by the backend's Scheduler).
   * Assume that scan ranges that can be scheduled locally will be, and that scan
   * ranges that cannot will be round-robined across the cluster.
   */
  protected void computeNumNodes(Analyzer analyzer) {
    ExecutorMembershipSnapshot cluster = ExecutorMembershipSnapshot.getCluster();
    final int maxInstancesPerNode = getMaxInstancesPerNode(analyzer);
    final int maxPossibleInstances =
        analyzer.numExecutorsForPlanning() * maxInstancesPerNode;
    int totalNodes = 0;
    int totalInstances = 0;
    int numLocalRanges = 0;
    int numRemoteRanges = 0;
    // Counts the number of local ranges, capped at maxInstancesPerNode.
    Map<TNetworkAddress, Integer> localRangeCounts = new HashMap<>();
    // Sum of the counter values in localRangeCounts.
    int totalLocalParallelism = 0;
    if (scanRangeSpecs_.isSetConcrete_ranges()) {
      for (TScanRangeLocationList range : scanRangeSpecs_.concrete_ranges) {
        boolean anyLocal = false;
        if (range.isSetLocations()) {
          for (TScanRangeLocation loc : range.locations) {
            TNetworkAddress address = analyzer.getHostIndex().getEntry(loc.getHost_idx());
            if (cluster.contains(address)) {
              anyLocal = true;
              // Use the full tserver address (including port) to account for the test
              // minicluster where there are multiple tservers and impalads on a single
              // host. This assumes that when an impalad is colocated with a tserver,
              // there are the same number of impalads as tservers on this host in this
              // cluster.
              int count = localRangeCounts.getOrDefault(address, 0);
              if (count < maxInstancesPerNode) {
                ++totalLocalParallelism;
                localRangeCounts.put(address, count + 1);
              }
            }
          }
        }
        // This range has at least one replica with a colocated impalad, so assume it
        // will be scheduled on one of those nodes.
        if (anyLocal) {
          ++numLocalRanges;
        } else {
          ++numRemoteRanges;
        }
        // The estimates below are recomputed every iteration so the early-exit check
        // at the end of the loop can fire as soon as the maximum is reached.
        // Approximate the number of nodes that will execute locally assigned ranges to
        // be the smaller of the number of locally assigned ranges and the number of
        // hosts that hold replica for those ranges.
        int numLocalNodes = Math.min(numLocalRanges, localRangeCounts.size());
        // The remote ranges are round-robined across all the impalads.
        int numRemoteNodes = Math.min(numRemoteRanges, analyzer.numExecutorsForPlanning());
        // The local and remote assignments may overlap, but we don't know by how much
        // so conservatively assume no overlap.
        totalNodes = Math.min(numLocalNodes + numRemoteNodes,
            analyzer.numExecutorsForPlanning());
        int numLocalInstances = Math.min(numLocalRanges, totalLocalParallelism);
        totalInstances = Math.min(numLocalInstances + numRemoteRanges,
            totalNodes * maxInstancesPerNode);
        // Exit early if we have maxed out our estimate of hosts/instances, to avoid
        // extraneous work in case the number of scan ranges dominates the number of
        // nodes.
        if (totalInstances == maxPossibleInstances) break;
      }
    }
    // Always report at least one node/instance.
    numNodes_ = Math.max(totalNodes, 1);
    numInstances_ = Math.max(totalInstances, 1);
  }

  /**
   * Computes node count and cardinality. Falls back to the table row-count hint when
   * the table has no stats; point-lookup queries are capped at cardinality 1.
   */
  @Override
  public void computeStats(Analyzer analyzer) {
    super.computeStats(analyzer);
    computeNumNodes(analyzer);
    // Update the cardinality, hint value will be used when table has no stats.
    inputCardinality_ = cardinality_ =
        kuduTable_.getNumRows() == -1 ? tableNumRowsHint_ : kuduTable_.getNumRows();
    if (isPointLookupQuery_) {
      // Adjust input and output cardinality for point lookup.
      // The planner doesn't create a KuduScanNode for a query with clause "limit 0",
      // so "limit" is at least 1 here and capCardinalityAtLimit() is unnecessary.
      if (cardinality_ != 0) cardinality_ = 1;
      inputCardinality_ = cardinality_;
    } else {
      cardinality_ = applyConjunctsSelectivity(cardinality_);
      cardinality_ = capCardinalityAtLimit(cardinality_);
    }
    if (LOG.isTraceEnabled()) {
      LOG.trace("computeStats KuduScan: cardinality=" + Long.toString(cardinality_));
    }
  }

  @Override
  public void computeProcessingCost(TQueryOptions queryOptions) {
    processingCost_ = computeScanProcessingCost(queryOptions);
  }

  @Override
  public void computeNodeResourceProfile(TQueryOptions queryOptions) {
    // The bulk of memory used by Kudu scan node is generally utilized by the
    // RowbatchQueue plus the row batches filled in by the scanner threads and
    // waiting to be queued into the RowbatchQueue.
Due to a number of factors // like variable length string columns, mem pool usage pattern, // variability of the number of scanner threads being spawned and the // variability of the average RowbatchQueue size, it is increasingly // difficult to precisely estimate the memory usage. Therefore, we fall back // to a more simpler approach of using empirically derived estimates. int numOfScanRanges = scanRangeSpecs_.getConcrete_rangesSize(); int perHostScanRanges = estimatePerHostScanRanges(numOfScanRanges); int maxScannerThreads = computeMaxNumberOfScannerThreads(queryOptions, perHostScanRanges); long estimated_bytes_per_column_per_thread = BackendConfig.INSTANCE.getBackendCfg(). kudu_scanner_thread_estimated_bytes_per_column; long max_estimated_bytes_per_thread = BackendConfig.INSTANCE.getBackendCfg(). kudu_scanner_thread_max_estimated_bytes; long mem_estimate_per_thread = Math.min(getNumMaterializedSlots(desc_) * estimated_bytes_per_column_per_thread, max_estimated_bytes_per_thread); useMtScanNode_ = Planner.useMTFragment(queryOptions); nodeResourceProfile_ = new ResourceProfileBuilder() .setMemEstimateBytes(mem_estimate_per_thread * maxScannerThreads) .setThreadReservation(useMtScanNode_ ? 0 : 1).build(); } @Override protected String getNodeExplainString(String prefix, String detailPrefix, TExplainLevel detailLevel) { StringBuilder result = new StringBuilder(); String aliasStr = desc_.hasExplicitAlias() ? " " + desc_.getAlias() : ""; result.append( String.format(replicaSelectionLeaderOnly_ ? "%s%s:%s [%s%s, LEADER-only]\n" : "%s%s:%s [%s%s]\n", prefix, id_.toString(), displayName_, kuduTable_.getFullName(), aliasStr)); switch (detailLevel) { case MINIMAL: break; case STANDARD: // Fallthrough intended. case EXTENDED: // Fallthrough intended. 
case VERBOSE: { if (!conjuncts_.isEmpty()) { result.append(detailPrefix + "predicates: " + Expr.getExplainString(conjuncts_, detailLevel) + "\n"); } if (!kuduConjuncts_.isEmpty()) { result.append(detailPrefix + "kudu predicates: " + Expr.getExplainString(kuduConjuncts_, detailLevel) + "\n"); } if (!runtimeFilters_.isEmpty()) { result.append(detailPrefix + "runtime filters: "); result.append(getRuntimeFilterExplainString(false, detailLevel)); } } } return result.toString(); } @Override protected void toThrift(TPlanNode node) { node.node_type = TPlanNodeType.KUDU_SCAN_NODE; node.kudu_scan_node = new TKuduScanNode(desc_.getId().asInt()); node.kudu_scan_node.setUse_mt_scan_node(useMtScanNode_); Preconditions.checkState((optimizedAggSmap_ == null) == (countStarSlot_ == null)); if (countStarSlot_ != null) { node.kudu_scan_node.setCount_star_slot_offset(countStarSlot_.getByteOffset()); } } /** * Extracts predicates from conjuncts_ that can be pushed down to Kudu. Currently only * binary predicates that have a constant expression on one side and a slot ref on the * other can be evaluated by Kudu. Only looks at comparisons of constants (i.e., the * bounds of the result can be evaluated with Expr::GetValue(NULL)). If a conjunct can * be converted into this form, the normalized expr is added to kuduConjuncts_, a * KuduPredicate is added to kuduPredicates_, and the original expr from conjuncts_ is * removed. */ private void extractKuduConjuncts(Analyzer analyzer, KuduClient client, org.apache.kudu.client.KuduTable rpcTable) { // The set of primary key column index which are in equality predicates where // it's compared to a constant, and will be pushed down to Kudu. 
Set<Integer> primaryKeyColsInEqualPred = new HashSet<>(); ListIterator<Expr> it = conjuncts_.listIterator(); while (it.hasNext()) { Expr predicate = it.next(); if (tryConvertBinaryKuduPredicate(analyzer, rpcTable, predicate, primaryKeyColsInEqualPred) || tryConvertInListKuduPredicate(analyzer, rpcTable, predicate) || tryConvertIsNullKuduPredicate(analyzer, rpcTable, predicate)) { if (currentPredicateNeedCheckAgain_) { currentPredicateNeedCheckAgain_ = false; } else { it.remove(); } } } if (primaryKeyColsInEqualPred.size() >= 1 && primaryKeyColsInEqualPred.size() == rpcTable.getSchema().getPrimaryKeyColumnCount()) { isPointLookupQuery_ = true; } } /** * If 'expr' can be converted to a KuduPredicate, returns true and updates * kuduPredicates_ and kuduConjuncts_. */ private boolean tryConvertBinaryKuduPredicate(Analyzer analyzer, org.apache.kudu.client.KuduTable table, Expr expr, Set<Integer> primaryKeyColsInEqualPred) { if (!(expr instanceof BinaryPredicate)) return false; BinaryPredicate predicate = (BinaryPredicate) expr; // TODO KUDU-931 look into handling implicit/explicit casts on the SlotRef. ComparisonOp op = getKuduOperator(predicate.getOp()); if (op == null) return false; if (!(predicate.getChild(0) instanceof SlotRef)) return false; SlotRef ref = (SlotRef) predicate.getChild(0); if (!(predicate.getChild(1) instanceof LiteralExpr)) return false; LiteralExpr literal = (LiteralExpr) predicate.getChild(1); // Cannot push predicates with null literal values (KUDU-1595). 
if (Expr.IS_NULL_LITERAL.apply(literal)) return false; String colName = ((KuduColumn) ref.getDesc().getColumn()).getKuduName(); ColumnSchema column = table.getSchema().getColumn(colName); KuduPredicate kuduPredicate = null; switch (literal.getType().getPrimitiveType()) { case BOOLEAN: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((BoolLiteral)literal).getValue()); break; } case TINYINT: case SMALLINT: case INT: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((NumericLiteral)literal).getIntValue()); break; } case BIGINT: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((NumericLiteral)literal).getLongValue()); break; } case FLOAT: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, (float)((NumericLiteral)literal).getDoubleValue()); break; } case DOUBLE: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((NumericLiteral)literal).getDoubleValue()); break; } case STRING: case VARCHAR: case CHAR: { StringLiteral strLit = (StringLiteral)literal; if (!strLit.isValidUtf8()) return false; kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, strLit.getUnescapedValue()); break; } case BINARY: { StringLiteral strLit = (StringLiteral)literal; kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, strLit.getBinValue()); break; } case TIMESTAMP: { try { // TODO: Simplify when Impala supports a 64-bit TIMESTAMP type. kuduPredicate = analyzer.getQueryOptions().isConvert_kudu_utc_timestamps() ? 
convertLocalTimestampBinaryKuduPredicate(analyzer, column, op, literal) : KuduPredicate.newComparisonPredicate(column, op, ExprUtil.utcTimestampToUnixTimeMicros(analyzer, literal)); } catch (Exception e) { LOG.info("Exception converting Kudu timestamp predicate: " + expr.toSql(), e); return false; } break; } case DATE: kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((DateLiteral)literal).getValue()); break; case DECIMAL: { kuduPredicate = KuduPredicate.newComparisonPredicate(column, op, ((NumericLiteral)literal).getValue()); break; } default: //All supported types are covered, should not reach default case Preconditions.checkState(false); } Preconditions.checkState(kuduPredicate != null); kuduConjuncts_.add(predicate); kuduPredicates_.add(kuduPredicate); if (predicate.getOp().isEquivalence() && column.isKey()) { Integer colIndex = table.getSchema().getColumnIndex(colName); primaryKeyColsInEqualPred.add(colIndex); } return true; } private KuduPredicate convertLocalTimestampBinaryKuduPredicate(Analyzer analyzer, ColumnSchema column, ComparisonOp op, LiteralExpr literal) throws AnalysisException, InternalException { Long preUnixTimeMicros = ExprUtil.localTimestampToUnixTimeMicros(analyzer, literal, true); Long postUnixTimeMicros = ExprUtil.localTimestampToUnixTimeMicros(analyzer, literal, false); // If the timestamp is not a valid local timestamp, EQUAL predicate should be always // false. For other comparison predicates, we could use the transition point time as // a common value for comparison. if (preUnixTimeMicros == null || postUnixTimeMicros == null) { if (preUnixTimeMicros == null) return null; // should not happen if (op == ComparisonOp.EQUAL) { // An empty IN LIST predicate is always false. return KuduPredicate.newInListPredicate(column, Lists.newArrayList()); } else { postUnixTimeMicros = preUnixTimeMicros; } } // If the timestamp is unique, create the predicate normally. 
if (preUnixTimeMicros.equals(postUnixTimeMicros)) { return KuduPredicate.newComparisonPredicate(column, op, preUnixTimeMicros); } // If the timestamp is ambiguous, we should convert EQUAL predicate to an IN LIST // predicate that include all ambiguous values. For comparison predicates, we need to // use a larger range of possible values for comparison to avoid missing rows. // Additionally, set currentPredicateNeedCheckAgain_ to true to indicate that the // predicate should not removed from the conjuncts_ list. switch (op) { case EQUAL: return KuduPredicate.newInListPredicate(column, Lists.newArrayList(preUnixTimeMicros, postUnixTimeMicros)); case LESS: case LESS_EQUAL: { currentPredicateNeedCheckAgain_ = true; return KuduPredicate.newComparisonPredicate(column, op, postUnixTimeMicros); } case GREATER: case GREATER_EQUAL: { currentPredicateNeedCheckAgain_ = true; return KuduPredicate.newComparisonPredicate(column, op, preUnixTimeMicros); } default: throw new InternalException("Unexpected operator: " + op); } } /** * If the InList 'expr' can be converted to a KuduPredicate, returns true and updates * kuduPredicates_ and kuduConjuncts_. */ private boolean tryConvertInListKuduPredicate(Analyzer analyzer, org.apache.kudu.client.KuduTable table, Expr expr) { if (!(expr instanceof InPredicate)) return false; InPredicate predicate = (InPredicate) expr; // Only convert IN predicates, i.e. cannot convert NOT IN. if (predicate.isNotIn()) return false; // Do not convert if there is an implicit cast. if (!(predicate.getChild(0) instanceof SlotRef)) return false; SlotRef ref = (SlotRef) predicate.getChild(0); // KuduPredicate takes a list of values as Objects. List<Object> values = new ArrayList<>(); for (int i = 1; i < predicate.getChildren().size(); ++i) { if (!Expr.IS_LITERAL.apply(predicate.getChild(i))) return false; LiteralExpr literal = (LiteralExpr) predicate.getChild(i); // Cannot push predicates with null literal values (KUDU-1595). 
if (Expr.IS_NULL_LITERAL.apply(literal)) return false; Object value = getKuduInListValue(analyzer, literal); if (value == null) return false; if (value instanceof List) { values.addAll((List<?>) value); } else { values.add(value); } } String colName = ((KuduColumn) ref.getDesc().getColumn()).getKuduName(); ColumnSchema column = table.getSchema().getColumn(colName); kuduPredicates_.add(KuduPredicate.newInListPredicate(column, values)); kuduConjuncts_.add(predicate); return true; } /** * If IS NULL/IS NOT NULL 'expr' can be converted to a KuduPredicate, * returns true and updates kuduPredicates_ and kuduConjuncts_. */ private boolean tryConvertIsNullKuduPredicate(Analyzer analyzer, org.apache.kudu.client.KuduTable table, Expr expr) { if (!(expr instanceof IsNullPredicate)) return false; IsNullPredicate predicate = (IsNullPredicate) expr; // Do not convert if expression is more than a SlotRef // This is true even for casts, as certain casts can take a non-NULL // value and produce a NULL. For example, CAST('test' as tinyint) // is NULL. if (!(predicate.getChild(0) instanceof SlotRef)) return false; SlotRef ref = (SlotRef) predicate.getChild(0); String colName = ((KuduColumn) ref.getDesc().getColumn()).getKuduName(); ColumnSchema column = table.getSchema().getColumn(colName); KuduPredicate kuduPredicate = null; if (predicate.isNotNull()) { kuduPredicate = KuduPredicate.newIsNotNullPredicate(column); } else { kuduPredicate = KuduPredicate.newIsNullPredicate(column); } kuduConjuncts_.add(predicate); kuduPredicates_.add(kuduPredicate); return true; } /** * Return the value of the InList child expression 'e' as an Object that can be * added to a KuduPredicate. If the Expr is not supported by Kudu or the type doesn't * match the expected PrimitiveType 'type', null is returned. 
* Additionally, if the query option 'convert_kudu_utc_timestamps' is enabled and when * the expression 'e' is converted from a local timestamp to a UTC timestamp, it is * invalid or ambiguous, the method will return either an empty list or a list * containing two ambiguous values. */ private static Object getKuduInListValue(Analyzer analyzer, LiteralExpr e) { switch (e.getType().getPrimitiveType()) { case BOOLEAN: return ((BoolLiteral) e).getValue(); case TINYINT: return (byte) ((NumericLiteral) e).getLongValue(); case SMALLINT: return (short) ((NumericLiteral) e).getLongValue(); case INT: return (int) ((NumericLiteral) e).getLongValue(); case BIGINT: return ((NumericLiteral) e).getLongValue(); case FLOAT: return (float) ((NumericLiteral) e).getDoubleValue(); case DOUBLE: return ((NumericLiteral) e).getDoubleValue(); case STRING: return ((StringLiteral) e).getUnescapedValue(); case TIMESTAMP: { try { // TODO: Simplify when Impala supports a 64-bit TIMESTAMP type. if (analyzer.getQueryOptions().isConvert_kudu_utc_timestamps()) { Long preUnixTimeMicros = ExprUtil.localTimestampToUnixTimeMicros(analyzer, e, true); Long postUnixTimeMicros = ExprUtil.localTimestampToUnixTimeMicros(analyzer, e, false); // If the timestamp is invalid in local time, return empty list. if (preUnixTimeMicros == null || postUnixTimeMicros == null) { if (preUnixTimeMicros == null) return null; // should not happen return Lists.newArrayList(); } // If the timestamp is unique, return the unique value. if (preUnixTimeMicros.equals(postUnixTimeMicros)) return preUnixTimeMicros; // If the timestamp is ambiguous, return a list of the two possible values. 
return Lists.newArrayList(preUnixTimeMicros, postUnixTimeMicros); } return ExprUtil.utcTimestampToUnixTimeMicros(analyzer, e); } catch (Exception ex) { LOG.info("Exception converting Kudu timestamp expr: " + e.toSql(), ex); } break; } case DECIMAL: return ((NumericLiteral) e).getValue(); default: Preconditions.checkState(false, "Unsupported Kudu type considered for predicate: %s", e.getType().toSql()); } return null; } /** * Returns a Kudu comparison operator for the BinaryPredicate operator, or null if * the operation is not supported by Kudu. */ private static KuduPredicate.ComparisonOp getKuduOperator(BinaryPredicate.Operator op) { switch (op) { case GT: return ComparisonOp.GREATER; case LT: return ComparisonOp.LESS; case GE: return ComparisonOp.GREATER_EQUAL; case LE: return ComparisonOp.LESS_EQUAL; case EQ: return ComparisonOp.EQUAL; default: return null; } } @Override public boolean hasStorageLayerConjuncts() { return !kuduConjuncts_.isEmpty(); } }
apache/ignite
35,344
modules/core/src/main/java/org/apache/ignite/internal/processors/odbc/odbc/OdbcRequestHandler.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.odbc.odbc; import java.sql.BatchUpdateException; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.PriorityQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import javax.cache.configuration.Factory; import org.apache.ignite.IgniteException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cache.query.FieldsQueryCursor; import org.apache.ignite.cache.query.SqlFieldsQuery; import org.apache.ignite.internal.GridKernalContext; import org.apache.ignite.internal.IgniteInterruptedCheckedException; import org.apache.ignite.internal.binary.BinaryWriterEx; import org.apache.ignite.internal.binary.GridBinaryMarshaller; import org.apache.ignite.internal.processors.cache.query.IgniteQueryErrorCode; import org.apache.ignite.internal.processors.cache.query.SqlFieldsQueryEx; import org.apache.ignite.internal.processors.odbc.ClientListenerProtocolVersion; import org.apache.ignite.internal.processors.odbc.ClientListenerRequest; import 
org.apache.ignite.internal.processors.odbc.ClientListenerRequestHandler; import org.apache.ignite.internal.processors.odbc.ClientListenerResponse; import org.apache.ignite.internal.processors.odbc.ClientListenerResponseSender; import org.apache.ignite.internal.processors.odbc.SqlListenerUtils; import org.apache.ignite.internal.processors.odbc.jdbc.JdbcParameterMeta; import org.apache.ignite.internal.processors.odbc.odbc.escape.OdbcEscapeUtils; import org.apache.ignite.internal.processors.query.GridQueryFieldMetadata; import org.apache.ignite.internal.processors.query.GridQueryProperty; import org.apache.ignite.internal.processors.query.GridQueryTypeDescriptor; import org.apache.ignite.internal.processors.query.IgniteSQLException; import org.apache.ignite.internal.processors.query.QueryUtils; import org.apache.ignite.internal.processors.query.SqlClientContext; import org.apache.ignite.internal.util.GridSpinBusyLock; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.X; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.internal.util.worker.GridWorker; import org.apache.ignite.lang.IgniteBiTuple; import org.jetbrains.annotations.Nullable; import static java.sql.ResultSetMetaData.columnNoNulls; import static java.sql.ResultSetMetaData.columnNullable; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_COLS; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_PARAMS; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_RESULTSET; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.META_TBLS; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.MORE_RESULTS; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_CLOSE; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_EXEC; import static 
org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_EXEC_BATCH; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.QRY_FETCH; import static org.apache.ignite.internal.processors.odbc.odbc.OdbcRequest.STREAMING_BATCH; /** * SQL query handler. */ public class OdbcRequestHandler implements ClientListenerRequestHandler { /** Query ID sequence. */ private static final AtomicLong QRY_ID_GEN = new AtomicLong(); /** Kernel context. */ private final GridKernalContext ctx; /** Client context. */ private final SqlClientContext cliCtx; /** Logger. */ private final IgniteLogger log; /** Busy lock. */ private final GridSpinBusyLock busyLock; /** Worker. */ private final OdbcRequestHandlerWorker worker; /** Maximum allowed cursors. */ private final int maxCursors; /** Current queries cursors. */ private final ConcurrentHashMap<Long, OdbcQueryResults> qryResults = new ConcurrentHashMap<>(); /** Client version. */ private ClientListenerProtocolVersion ver; /** Ordered batches queue. */ private final PriorityQueue<OdbcStreamingBatchRequest> orderedBatchesQueue = new PriorityQueue<>(); /** Ordered batches mutex. */ private final Object orderedBatchesMux = new Object(); /** Response sender. */ private final ClientListenerResponseSender sender; /** Connection context. */ private final OdbcConnectionContext connCtx; /** * Constructor. * @param ctx Context. * @param busyLock Shutdown latch. * @param sender Results sender. * @param maxCursors Maximum allowed cursors. * @param distributedJoins Distributed joins flag. * @param enforceJoinOrder Enforce join order flag. * @param replicatedOnly Replicated only flag. * @param collocated Collocated flag. * @param lazy Lazy flag. * @param skipReducerOnUpdate Skip reducer on update flag. * @param qryEngine Name of SQL query engine to use. * @param ver Client protocol version. 
*/ public OdbcRequestHandler( GridKernalContext ctx, GridSpinBusyLock busyLock, ClientListenerResponseSender sender, int maxCursors, boolean distributedJoins, boolean enforceJoinOrder, boolean replicatedOnly, boolean collocated, boolean lazy, boolean skipReducerOnUpdate, @Nullable String qryEngine, ClientListenerProtocolVersion ver, OdbcConnectionContext connCtx) { this.ctx = ctx; this.connCtx = connCtx; Factory<GridWorker> orderedFactory = new Factory<GridWorker>() { @Override public GridWorker create() { return new OrderedBatchWorker(); } }; this.cliCtx = new SqlClientContext( ctx, orderedFactory, distributedJoins, enforceJoinOrder, collocated, replicatedOnly, lazy, skipReducerOnUpdate, null, null, qryEngine, null, null, 0, null ); this.busyLock = busyLock; this.sender = sender; this.maxCursors = maxCursors; this.ver = ver; log = ctx.log(getClass()); // TODO IGNITE-9484 Do not create worker if there is a possibility to unbind TX from threads. worker = new OdbcRequestHandlerWorker(ctx.igniteInstanceName(), log, this, ctx); } /** {@inheritDoc} */ @Override public ClientListenerResponse handle(ClientListenerRequest req) { assert req != null; assert req instanceof OdbcRequest; return doHandle((OdbcRequest)req); } /** * Start worker, if it's present. */ void start() { if (worker != null) worker.start(); } /** * Handle ODBC request. * @param req ODBC request. * @return Response. 
*/ public ClientListenerResponse doHandle(OdbcRequest req) { if (!busyLock.enterBusy()) return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Failed to handle ODBC request because node is stopping: " + req); try { switch (req.command()) { case QRY_EXEC: return executeQuery((OdbcQueryExecuteRequest)req); case QRY_EXEC_BATCH: return executeBatchQuery((OdbcQueryExecuteBatchRequest)req); case STREAMING_BATCH: return dispatchBatchOrdered((OdbcStreamingBatchRequest)req); case QRY_FETCH: return fetchQuery((OdbcQueryFetchRequest)req); case QRY_CLOSE: return closeQuery((OdbcQueryCloseRequest)req); case META_COLS: return getColumnsMeta((OdbcQueryGetColumnsMetaRequest)req); case META_TBLS: return getTablesMeta((OdbcQueryGetTablesMetaRequest)req); case META_PARAMS: return getParamsMeta((OdbcQueryGetParamsMetaRequest)req); case META_RESULTSET: return getResultMeta((OdbcQueryGetResultsetMetaRequest)req); case MORE_RESULTS: return moreResults((OdbcQueryMoreResultsRequest)req); } return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Unsupported ODBC request: " + req); } finally { busyLock.leaveBusy(); } } /** {@inheritDoc} */ @Override public ClientListenerResponse handleException(Throwable e, ClientListenerRequest req) { return exceptionToResult(e); } /** {@inheritDoc} */ @Override public void writeHandshake(BinaryWriterEx writer) { writer.writeBoolean(true); } /** * Called whenever client is disconnected due to correct connection close * or due to {@code IOException} during network operations. */ public void onDisconnect() { if (busyLock.enterBusy()) { if (worker != null) { worker.cancel(); try { worker.join(); } catch (InterruptedException e) { // No-op. 
} } try { for (OdbcQueryResults res : qryResults.values()) res.closeAll(); U.close(cliCtx, log); } finally { busyLock.leaveBusy(); } } } /** {@inheritDoc} */ @Override public boolean isCancellationCommand(int cmdId) { return false; } /** {@inheritDoc} */ @Override public boolean isCancellationSupported() { return false; } /** {@inheritDoc} */ @Override public void registerRequest(long reqId, int cmdType) { // No-op. } /** {@inheritDoc} */ @Override public void unregisterRequest(long reqId) { // No-op. } /** {@inheritDoc} */ @Override public ClientListenerProtocolVersion protocolVersion() { return ver; } /** * Make query considering handler configuration. * @param schema Schema. * @param sql SQL request. * @param args Arguments. * @param autoCommit Autocommit transaction. * @param timeout Query timeout. * @return Query instance. */ private SqlFieldsQueryEx makeQuery(String schema, String sql, Object[] args, int timeout, boolean autoCommit) { SqlFieldsQueryEx qry = makeQuery(schema, sql); qry.setArgs(args); qry.setAutoCommit(autoCommit); QueryUtils.withQueryTimeout(qry, timeout, TimeUnit.SECONDS); return qry; } /** * Make query considering handler configuration. * @param schema Schema. * @param sql SQL request. * @return Query instance. */ private SqlFieldsQueryEx makeQuery(String schema, String sql) { SqlFieldsQueryEx qry = new SqlFieldsQueryEx(sql, null); qry.setDistributedJoins(cliCtx.isDistributedJoins()); qry.setEnforceJoinOrder(cliCtx.isEnforceJoinOrder()); qry.setReplicatedOnly(cliCtx.isReplicatedOnly()); qry.setCollocated(cliCtx.isCollocated()); qry.setLazy(cliCtx.isLazy()); qry.setSchema(OdbcUtils.prepareSchema(schema)); qry.setSkipReducerOnUpdate(cliCtx.isSkipReducerOnUpdate()); qry.setQueryInitiatorId(connCtx.clientDescriptor()); return qry; } /** * {@link OdbcQueryExecuteRequest} command handler. * * @param req Execute query request. * @return Response. 
*/ private ClientListenerResponse executeQuery(OdbcQueryExecuteRequest req) { int cursorCnt = qryResults.size(); if (maxCursors > 0 && cursorCnt >= maxCursors) return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Too many open cursors (either close " + "other open cursors or increase the limit through " + "ClientConnectorConfiguration.maxOpenCursorsPerConnection) [maximum=" + maxCursors + ", current=" + cursorCnt + ']'); long qryId = QRY_ID_GEN.getAndIncrement(); assert !cliCtx.isStream(); try { String sql = OdbcEscapeUtils.parse(req.sqlQuery()); if (log.isDebugEnabled()) log.debug("ODBC query parsed [reqId=" + req.requestId() + ", original=" + req.sqlQuery() + ", parsed=" + sql + ']'); SqlFieldsQuery qry = makeQuery(req.schema(), sql, req.arguments(), req.timeout(), req.autoCommit()); List<FieldsQueryCursor<List<?>>> cursors = ctx.query().querySqlFields(null, qry, cliCtx, true, false); OdbcQueryResults results = new OdbcQueryResults(cursors, ver); Collection<OdbcColumnMeta> fieldsMeta; OdbcResultSet set = results.currentResultSet(); if (set == null) fieldsMeta = new ArrayList<>(); else { fieldsMeta = set.fieldsMeta(); if (log.isDebugEnabled()) { for (OdbcColumnMeta meta : fieldsMeta) log.debug("Meta - " + meta.toString()); } } if (!results.hasUnfetchedRows()) results.closeAll(); else qryResults.put(qryId, results); OdbcQueryExecuteResult res = new OdbcQueryExecuteResult(qryId, fieldsMeta, results.rowsAffected()); return new OdbcResponse(res); } catch (Exception e) { qryResults.remove(qryId); U.error(log, "Failed to execute SQL query [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryExecuteBatchRequest} command handler. * * @param req Execute query request. * @return Response. 
*/ private ClientListenerResponse executeBatchQuery(OdbcQueryExecuteBatchRequest req) { try { String sql = OdbcEscapeUtils.parse(req.sqlQuery()); if (log.isDebugEnabled()) log.debug("ODBC query parsed [reqId=" + req.requestId() + ", original=" + req.sqlQuery() + ", parsed=" + sql + ']'); SqlFieldsQueryEx qry = makeQuery(req.schema(), sql, null, req.timeout(), req.autoCommit()); Object[][] paramSet = req.arguments(); if (paramSet.length <= 0) throw new IgniteException("Batch execute request with non-positive batch length. [len=" + paramSet.length + ']'); // Getting meta and do the checks for the first execution. for (Object[] set : paramSet) qry.addBatchedArgs(set); List<FieldsQueryCursor<List<?>>> qryCurs = ctx.query().querySqlFields(null, qry, cliCtx, true, true); long[] rowsAffected = new long[req.arguments().length]; for (int i = 0; i < qryCurs.size(); ++i) rowsAffected[i] = OdbcUtils.rowsAffected(qryCurs.get(i)); OdbcQueryExecuteBatchResult res = new OdbcQueryExecuteBatchResult(rowsAffected); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to execute SQL query [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToBatchResult(e); } } /** * @param req Ordered batch request. * @return Response. */ private ClientListenerResponse dispatchBatchOrdered(OdbcStreamingBatchRequest req) { if (!cliCtx.isStreamOrdered()) processStreamingBatchOrdered(req); else { synchronized (orderedBatchesMux) { orderedBatchesQueue.add(req); orderedBatchesMux.notifyAll(); } } return null; } /** * @param req Ordered batch request. */ private void processStreamingBatchOrdered(OdbcStreamingBatchRequest req) { try { if (req.last()) cliCtx.waitTotalProcessedOrderedRequests(req.order()); sender.send(processStreamingBatch(req)); } catch (Exception e) { U.error(null, "Error processing file batch", e); sender.send(new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Server error: " + e)); } cliCtx.orderedRequestProcessed(); } /** * @param req Request. 
* @return Response. */ private ClientListenerResponse processStreamingBatch(OdbcStreamingBatchRequest req) { assert cliCtx.isStream(); // Send back only the first error. Others will be written to the log. IgniteBiTuple<Integer, String> firstErr = new IgniteBiTuple<>(); SqlFieldsQueryEx qry = null; for (OdbcQuery q : req.queries()) { if (q.sql() != null) { // If we have a new query string in the batch, if (qry != null) // then execute the previous sub-batch and create a new SqlFieldsQueryEx. processStreamingBatch(qry, firstErr); qry = makeQuery(req.schemaName(), q.sql()); } assert qry != null; qry.addBatchedArgs(q.args()); } if (qry != null) processStreamingBatch(qry, firstErr); if (req.last()) cliCtx.disableStreaming(); if (firstErr.isEmpty()) return new OdbcResponse(new OdbcStreamingBatchResult(req.order())); else { assert firstErr.getKey() != null; return new OdbcResponse(new OdbcStreamingBatchResult(firstErr.getKey(), firstErr.getValue(), req.order())); } } /** * Executes query and updates result counters. * * @param qry Query. * @param err First error data - code and message. */ private void processStreamingBatch(SqlFieldsQueryEx qry, IgniteBiTuple<Integer, String> err) { try { assert cliCtx.isStream(); ctx.query().streamBatchedUpdateQuery( OdbcUtils.prepareSchema(qry.getSchema()), cliCtx, qry.getSql(), qry.batchedArguments(), connCtx.clientDescriptor() ); } catch (Exception e) { U.error(log, "Failed to execute batch query [qry=" + qry + ']', e); extractBatchError(e, null, err); } } /** * {@link OdbcQueryCloseRequest} command handler. * * @param req Execute query request. * @return Response. 
*/ private ClientListenerResponse closeQuery(OdbcQueryCloseRequest req) { long qryId = req.queryId(); try { OdbcQueryResults results = qryResults.get(qryId); if (results == null) return new OdbcResponse(IgniteQueryErrorCode.UNKNOWN, "Failed to find query with ID: " + qryId); CloseCursor(results, qryId); OdbcQueryCloseResult res = new OdbcQueryCloseResult(qryId); return new OdbcResponse(res); } catch (Exception e) { qryResults.remove(qryId); U.error(log, "Failed to close SQL query [reqId=" + req.requestId() + ", req=" + qryId + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryFetchRequest} command handler. * * @param req Execute query request. * @return Response. */ private ClientListenerResponse fetchQuery(OdbcQueryFetchRequest req) { try { long qryId = req.queryId(); OdbcQueryResults results = qryResults.get(qryId); if (results == null) return new OdbcResponse(ClientListenerResponse.STATUS_FAILED, "Failed to find query with ID: " + qryId); OdbcResultSet set = results.currentResultSet(); List<Object> items = set.fetch(req.pageSize()); boolean lastPage = !set.hasUnfetchedRows(); // Automatically closing cursor if no more data is available. if (!results.hasUnfetchedRows()) CloseCursor(results, qryId); OdbcQueryFetchResult res = new OdbcQueryFetchResult(qryId, items, lastPage); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to fetch SQL query result [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryGetColumnsMetaRequest} command handler. * * @param req Get columns metadata request. * @return Response. */ private ClientListenerResponse getColumnsMeta(OdbcQueryGetColumnsMetaRequest req) { try { List<OdbcColumnMeta> meta = new ArrayList<>(); String schemaPattern; String tablePattern; if (req.tablePattern().contains(".")) { // Parsing two-part table name. 
String[] parts = req.tablePattern().split("\\."); schemaPattern = parts[0]; tablePattern = parts[1]; } else { schemaPattern = req.schemaPattern(); tablePattern = req.tablePattern(); } schemaPattern = OdbcUtils.removeQuotationMarksIfNeeded(schemaPattern); for (String cacheName : ctx.cache().publicCacheNames()) { for (GridQueryTypeDescriptor table : ctx.query().types(cacheName)) { if (!matches(table.schemaName(), schemaPattern) || !matches(table.tableName(), tablePattern)) continue; for (Map.Entry<String, Class<?>> field : table.fields().entrySet()) { if (!matches(field.getKey(), req.columnPattern())) continue; GridQueryProperty prop = table.property(field.getKey()); OdbcColumnMeta columnMeta = new OdbcColumnMeta(table.schemaName(), table.tableName(), field.getKey(), field.getValue(), prop.precision(), prop.scale(), prop.notNull() ? columnNoNulls : columnNullable); if (!meta.contains(columnMeta)) meta.add(columnMeta); } } } OdbcQueryGetColumnsMetaResult res = new OdbcQueryGetColumnsMetaResult(meta); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to get columns metadata [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryGetTablesMetaRequest} command handler. * * @param req Get tables metadata request. * @return Response. 
*/ private ClientListenerResponse getTablesMeta(OdbcQueryGetTablesMetaRequest req) { try { List<OdbcTableMeta> meta = new ArrayList<>(); String schemaPattern = OdbcUtils.removeQuotationMarksIfNeeded(req.schema()); for (String cacheName : ctx.cache().publicCacheNames()) { for (GridQueryTypeDescriptor table : ctx.query().types(cacheName)) { if (!matches(table.schemaName(), schemaPattern) || !matches(table.tableName(), req.table()) || !matchesTableType("TABLE", req.tableType())) continue; OdbcTableMeta tableMeta = new OdbcTableMeta(null, table.schemaName(), table.tableName(), "TABLE"); if (!meta.contains(tableMeta)) meta.add(tableMeta); } } OdbcQueryGetTablesMetaResult res = new OdbcQueryGetTablesMetaResult(meta); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to get tables metadata [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryGetQueryMetaRequest} command handler. * Returns metadata for the parameters to be set. * * @param req Get params metadata request. * @return Response. */ private ClientListenerResponse getParamsMeta(OdbcQueryGetParamsMetaRequest req) { try { String sql = OdbcEscapeUtils.parse(req.query()); String schema = OdbcUtils.prepareSchema(req.schema()); SqlFieldsQueryEx qry = makeQuery(schema, sql); List<JdbcParameterMeta> params = ctx.query().parameterMetaData(qry, cliCtx); byte[] typeIds = new byte[params.size()]; for (int i = 0; i < params.size(); ++i) { int sqlType = params.get(i).type(); typeIds[i] = sqlTypeToBinary(sqlType); } OdbcQueryGetParamsMetaResult res = new OdbcQueryGetParamsMetaResult(typeIds); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to get params metadata [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryGetQueryMetaRequest} command handler. * Returns metadata for a columns of the result set. * * @param req Get resultset metadata request. 
* @return Response. */ private ClientListenerResponse getResultMeta(OdbcQueryGetResultsetMetaRequest req) { try { String sql = OdbcEscapeUtils.parse(req.query()); String schema = OdbcUtils.prepareSchema(req.schema()); SqlFieldsQueryEx qry = makeQuery(schema, sql); List<GridQueryFieldMetadata> columns = ctx.query().resultSetMetaData(qry, cliCtx); Collection<OdbcColumnMeta> meta = OdbcUtils.convertMetadata(columns); OdbcQueryGetResultsetMetaResult res = new OdbcQueryGetResultsetMetaResult(meta); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to get resultset metadata [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * {@link OdbcQueryMoreResultsRequest} command handler. * * @param req Execute query request. * @return Response. */ private ClientListenerResponse moreResults(OdbcQueryMoreResultsRequest req) { try { long qryId = req.queryId(); OdbcQueryResults results = qryResults.get(qryId); if (results == null) return new OdbcResponse(ClientListenerResponse.STATUS_FAILED, "Failed to find query with ID: " + qryId); results.nextResultSet(); OdbcResultSet set = results.currentResultSet(); List<Object> items = set.fetch(req.pageSize()); boolean lastPage = !set.hasUnfetchedRows(); // Automatically closing cursor if no more data is available. if (!results.hasUnfetchedRows()) CloseCursor(results, qryId); OdbcQueryMoreResultsResult res = new OdbcQueryMoreResultsResult(qryId, items, lastPage); return new OdbcResponse(res); } catch (Exception e) { U.error(log, "Failed to get more SQL query results [reqId=" + req.requestId() + ", req=" + req + ']', e); return exceptionToResult(e); } } /** * Close cursor. * @param results Query map element. * @param queryId Query ID. 
*/ private void CloseCursor(OdbcQueryResults results, long queryId) { assert (results != null); results.closeAll(); qryResults.remove(queryId); } /** * Convert {@link java.sql.Types} to binary type constant (See {@link GridBinaryMarshaller} constants). * * @param sqlType SQL type. * @return Binary type. */ private static byte sqlTypeToBinary(int sqlType) { switch (sqlType) { case Types.BIGINT: return GridBinaryMarshaller.LONG; case Types.BOOLEAN: return GridBinaryMarshaller.BOOLEAN; case Types.DATE: return GridBinaryMarshaller.DATE; case Types.DOUBLE: return GridBinaryMarshaller.DOUBLE; case Types.FLOAT: case Types.REAL: return GridBinaryMarshaller.FLOAT; case Types.NUMERIC: case Types.DECIMAL: return GridBinaryMarshaller.DECIMAL; case Types.INTEGER: return GridBinaryMarshaller.INT; case Types.SMALLINT: return GridBinaryMarshaller.SHORT; case Types.TIME: return GridBinaryMarshaller.TIME; case Types.TIMESTAMP: return GridBinaryMarshaller.TIMESTAMP; case Types.TINYINT: return GridBinaryMarshaller.BYTE; case Types.CHAR: case Types.VARCHAR: case Types.LONGNVARCHAR: return GridBinaryMarshaller.STRING; case Types.NULL: return GridBinaryMarshaller.NULL; case Types.BINARY: case Types.VARBINARY: case Types.LONGVARBINARY: default: return GridBinaryMarshaller.BYTE_ARR; } } /** * Checks whether string matches table type pattern. * * @param str String. * @param ptrn Pattern. * @return Whether string matches pattern. */ private static boolean matchesTableType(String str, String ptrn) { if (F.isEmpty(ptrn)) return true; if (str == null) return false; String pattern = OdbcUtils.preprocessPattern(ptrn); String[] types = pattern.split(","); for (String type0 : types) { String type = OdbcUtils.removeQuotationMarksIfNeeded(type0.trim()); if (str.toUpperCase().matches(type)) return true; } return false; } /** * Checks whether string matches SQL pattern. * * @param str String. * @param ptrn Pattern. * @return Whether string matches pattern. 
*/ private static boolean matches(String str, String ptrn) { if (F.isEmpty(ptrn)) return true; if (str == null) return false; String pattern = OdbcUtils.preprocessPattern(ptrn); return str.toUpperCase().matches(pattern); } /** * Create {@link OdbcResponse} bearing appropriate Ignite specific result code if possible * from given {@link Exception}. * * @param e Exception to convert. * @return resulting {@link OdbcResponse}. */ private static OdbcResponse exceptionToBatchResult(Exception e) { IgniteBiTuple<Integer, String> err = new IgniteBiTuple<>(); List<Long> rowsAffected = new ArrayList<>(); extractBatchError(e, rowsAffected, err); OdbcQueryExecuteBatchResult res = new OdbcQueryExecuteBatchResult( U.toLongArray(rowsAffected), -1, err.get1(), err.get2()); return new OdbcResponse(res); } /** * Extract batching error from general exception. * @param e Exception * @param rowsAffected List containing the number of affected rows for every query in batch. * @param err Error tuple containing error code and error message. */ private static void extractBatchError(Exception e, List<Long> rowsAffected, IgniteBiTuple<Integer, String> err) { if (e instanceof IgniteSQLException) { BatchUpdateException batchCause = X.cause(e, BatchUpdateException.class); if (batchCause != null) { if (rowsAffected != null) { for (long cnt : batchCause.getLargeUpdateCounts()) rowsAffected.add(cnt); } err.set(batchCause.getErrorCode(), batchCause.getMessage()); } else err.set(((IgniteSQLException)e).statusCode(), OdbcUtils.tryRetrieveH2ErrorMessage(e)); } else err.set(IgniteQueryErrorCode.UNKNOWN, e.getMessage()); } /** * Create {@link OdbcResponse} bearing appropriate Ignite specific result code if possible * from given {@link Exception}. * * @param e Exception to convert. * @return resulting {@link OdbcResponse}. 
*/ private static OdbcResponse exceptionToResult(Throwable e) { String msg = OdbcUtils.tryRetrieveH2ErrorMessage(e); int errorCode = SqlListenerUtils.exceptionToSqlErrorCode(e); return new OdbcResponse(errorCode, msg); } /** * Ordered batch worker. */ private class OrderedBatchWorker extends GridWorker { /** * Constructor. */ OrderedBatchWorker() { super(ctx.igniteInstanceName(), "ordered-batch", OdbcRequestHandler.this.log); } /** {@inheritDoc} */ @Override protected void body() throws InterruptedException, IgniteInterruptedCheckedException { long nextBatchOrder = 0; while (true) { if (!cliCtx.isStream()) return; OdbcStreamingBatchRequest req; synchronized (orderedBatchesMux) { req = orderedBatchesQueue.peek(); if (req == null || req.order() != nextBatchOrder) { orderedBatchesMux.wait(); continue; } else orderedBatchesQueue.poll(); } processStreamingBatchOrdered(req); nextBatchOrder++; } } } }
google/j2objc
35,179
xalan/third_party/android/platform/external/apache-xml/src/main/java/org/apache/xpath/NodeSetDTM.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * $Id: NodeSetDTM.java 468655 2006-10-28 07:12:06Z minchau $ */ package org.apache.xpath; import org.apache.xalan.res.XSLMessages; import org.apache.xml.dtm.DTM; import org.apache.xml.dtm.DTMFilter; import org.apache.xml.dtm.DTMIterator; import org.apache.xml.dtm.DTMManager; import org.apache.xml.utils.NodeVector; import org.apache.xpath.res.XPATHErrorResources; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.traversal.NodeIterator; /** * <p>The NodeSetDTM class can act as either a NodeVector, * NodeList, or NodeIterator. However, in order for it to * act as a NodeVector or NodeList, it's required that * setShouldCacheNodes(true) be called before the first * nextNode() is called, in order that nodes can be added * as they are fetched. Derived classes that implement iterators * must override runTo(int index), in order that they may * run the iteration to the given index. </p> * * <p>Note that we directly implement the DOM's NodeIterator * interface. We do not emulate all the behavior of the * standard NodeIterator. In particular, we do not guarantee * to present a "live view" of the document ... 
but in XSLT, * the source document should never be mutated, so this should * never be an issue.</p> * * <p>Thought: Should NodeSetDTM really implement NodeList and NodeIterator, * or should there be specific subclasses of it which do so? The * advantage of doing it all here is that all NodeSetDTMs will respond * to the same calls; the disadvantage is that some of them may return * less-than-enlightening results when you do so.</p> * @xsl.usage advanced */ public class NodeSetDTM extends NodeVector implements /* NodeList, NodeIterator, */ DTMIterator, Cloneable { static final long serialVersionUID = 7686480133331317070L; /** * Create an empty nodelist. */ public NodeSetDTM(DTMManager dtmManager) { super(); m_manager = dtmManager; } /** * Create an empty, using the given block size. * * @param blocksize Size of blocks to allocate * @param dummy pass zero for right now... */ public NodeSetDTM(int blocksize, int dummy, DTMManager dtmManager) { super(blocksize); m_manager = dtmManager; } // %TBD% // /** // * Create a NodeSetDTM, and copy the members of the // * given nodelist into it. // * // * @param nodelist List of Nodes to be made members of the new set. // */ // public NodeSetDTM(NodeList nodelist) // { // // super(); // // addNodes(nodelist); // } /** * Create a NodeSetDTM, and copy the members of the * given NodeSetDTM into it. * * @param nodelist Set of Nodes to be made members of the new set. */ public NodeSetDTM(NodeSetDTM nodelist) { super(); m_manager = nodelist.getDTMManager(); m_root = nodelist.getRoot(); addNodes((DTMIterator) nodelist); } /** * Create a NodeSetDTM, and copy the members of the * given DTMIterator into it. * * @param ni Iterator which yields Nodes to be made members of the new set. */ public NodeSetDTM(DTMIterator ni) { super(); m_manager = ni.getDTMManager(); m_root = ni.getRoot(); addNodes(ni); } /** * Create a NodeSetDTM, and copy the members of the * given DTMIterator into it. 
* * @param iterator Iterator which yields Nodes to be made members of the new set. */ public NodeSetDTM(NodeIterator iterator, XPathContext xctxt) { super(); Node node; m_manager = xctxt.getDTMManager(); while (null != (node = iterator.nextNode())) { int handle = xctxt.getDTMHandleFromNode(node); addNodeInDocOrder(handle, xctxt); } } /** * Create a NodeSetDTM, and copy the members of the * given DTMIterator into it. * */ public NodeSetDTM(NodeList nodeList, XPathContext xctxt) { super(); m_manager = xctxt.getDTMManager(); int n = nodeList.getLength(); for (int i = 0; i < n; i++) { Node node = nodeList.item(i); int handle = xctxt.getDTMHandleFromNode(node); // Do not reorder or strip duplicate nodes from the given DOM nodelist addNode(handle); // addNodeInDocOrder(handle, xctxt); } } /** * Create a NodeSetDTM which contains the given Node. * * @param node Single node to be added to the new set. */ public NodeSetDTM(int node, DTMManager dtmManager) { super(); m_manager = dtmManager; addNode(node); } /** * Set the environment in which this iterator operates, which should provide: * a node (the context node... same value as "root" defined below) * a pair of non-zero positive integers (the context position and the context size) * a set of variable bindings * a function library * the set of namespace declarations in scope for the expression. * * <p>At this time the exact implementation of this environment is application * dependent. Probably a proper interface will be created fairly soon.</p> * * @param environment The environment object. */ public void setEnvironment(Object environment) { // no-op } /** * @return The root node of the Iterator, as specified when it was created. * For non-Iterator NodeSetDTMs, this will be null. */ public int getRoot() { if(DTM.NULL == m_root) { if(size() > 0) return item(0); else return DTM.NULL; } else return m_root; } /** * Initialize the context values for this expression * after it is cloned. 
* * @param context The XPath runtime context for this * transformation. */ public void setRoot(int context, Object environment) { // no-op, I guess... (-sb) } /** * Clone this NodeSetDTM. * At this time, we only expect this to be used with LocPathIterators; * it may not work with other kinds of NodeSetDTMs. * * @return a new NodeSetDTM of the same type, having the same state... * though unless overridden in the subclasses, it may not copy all * the state information. * * @throws CloneNotSupportedException if this subclass of NodeSetDTM * does not support the clone() operation. */ public Object clone() throws CloneNotSupportedException { NodeSetDTM clone = (NodeSetDTM) super.clone(); return clone; } /** * Get a cloned Iterator, and reset its state to the beginning of the * iteration. * * @return a new NodeSetDTM of the same type, having the same state... * except that the reset() operation has been called. * * @throws CloneNotSupportedException if this subclass of NodeSetDTM * does not support the clone() operation. */ public DTMIterator cloneWithReset() throws CloneNotSupportedException { NodeSetDTM clone = (NodeSetDTM) clone(); clone.reset(); return clone; } /** * Reset the iterator. May have no effect on non-iterator Nodesets. */ public void reset() { m_next = 0; } /** * This attribute determines which node types are presented via the * iterator. The available set of constants is defined in the * <code>DTMFilter</code> interface. For NodeSetDTMs, the mask has been * hardcoded to show all nodes except EntityReference nodes, which have * no equivalent in the XPath data model. * * @return integer used as a bit-array, containing flags defined in * the DOM's DTMFilter class. The value will be * <code>SHOW_ALL & ~SHOW_ENTITY_REFERENCE</code>, meaning that * only entity references are suppressed. */ public int getWhatToShow() { return DTMFilter.SHOW_ALL & ~DTMFilter.SHOW_ENTITY_REFERENCE; } /** * The filter object used to screen nodes. 
Filters are applied to * further reduce (and restructure) the DTMIterator's view of the * document. In our case, we will be using hardcoded filters built * into our iterators... but getFilter() is part of the DOM's * DTMIterator interface, so we have to support it. * * @return null, which is slightly misleading. True, there is no * user-written filter object, but in fact we are doing some very * sophisticated custom filtering. A DOM purist might suggest * returning a placeholder object just to indicate that this is * not going to return all nodes selected by whatToShow. */ public DTMFilter getFilter() { return null; } /** * The value of this flag determines whether the children of entity * reference nodes are visible to the iterator. If false, they will be * skipped over. * <br> To produce a view of the document that has entity references * expanded and does not expose the entity reference node itself, use the * whatToShow flags to hide the entity reference node and set * expandEntityReferences to true when creating the iterator. To produce * a view of the document that has entity reference nodes but no entity * expansion, use the whatToShow flags to show the entity reference node * and set expandEntityReferences to false. * * @return true for all iterators based on NodeSetDTM, meaning that the * contents of EntityRefrence nodes may be returned (though whatToShow * says that the EntityReferences themselves are not shown.) */ public boolean getExpandEntityReferences() { return true; } /** * Get an instance of a DTM that "owns" a node handle. Since a node * iterator may be passed without a DTMManager, this allows the * caller to easily get the DTM using just the iterator. * * @param nodeHandle the nodeHandle. * * @return a non-null DTM reference. */ public DTM getDTM(int nodeHandle) { return m_manager.getDTM(nodeHandle); } /* An instance of the DTMManager. */ DTMManager m_manager; /** * Get an instance of the DTMManager. 
Since a node * iterator may be passed without a DTMManager, this allows the * caller to easily get the DTMManager using just the iterator. * * @return a non-null DTMManager reference. */ public DTMManager getDTMManager() { return m_manager; } /** * Returns the next node in the set and advances the position of the * iterator in the set. After a DTMIterator is created, the first call * to nextNode() returns the first node in the set. * @return The next <code>Node</code> in the set being iterated over, or * <code>DTM.NULL</code> if there are no more members in that set. * @throws DOMException * INVALID_STATE_ERR: Raised if this method is called after the * <code>detach</code> method was invoked. */ public int nextNode() { if ((m_next) < this.size()) { int next = this.elementAt(m_next); m_next++; return next; } else return DTM.NULL; } /** * Returns the previous node in the set and moves the position of the * iterator backwards in the set. * @return The previous <code>Node</code> in the set being iterated over, * or<code>DTM.NULL</code> if there are no more members in that set. * @throws DOMException * INVALID_STATE_ERR: Raised if this method is called after the * <code>detach</code> method was invoked. * @throws RuntimeException thrown if this NodeSetDTM is not of * a cached type, and hence doesn't know what the previous node was. */ public int previousNode() { if (!m_cacheNodes) throw new RuntimeException( XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_ITERATE, null)); //"This NodeSetDTM can not iterate to a previous node!"); if ((m_next - 1) > 0) { m_next--; return this.elementAt(m_next); } else return DTM.NULL; } /** * Detaches the iterator from the set which it iterated over, releasing * any computational resources and placing the iterator in the INVALID * state. After<code>detach</code> has been invoked, calls to * <code>nextNode</code> or<code>previousNode</code> will raise the * exception INVALID_STATE_ERR. 
* <p> * This operation is a no-op in NodeSetDTM, and will not cause * INVALID_STATE_ERR to be raised by later operations. * </p> */ public void detach(){} /** * Specify if it's OK for detach to release the iterator for reuse. * * @param allowRelease true if it is OK for detach to release this iterator * for pooling. */ public void allowDetachToRelease(boolean allowRelease) { // no action for right now. } /** * Tells if this NodeSetDTM is "fresh", in other words, if * the first nextNode() that is called will return the * first node in the set. * * @return true if nextNode() would return the first node in the set, * false if it would return a later one. */ public boolean isFresh() { return (m_next == 0); } /** * If an index is requested, NodeSetDTM will call this method * to run the iterator to the index. By default this sets * m_next to the index. If the index argument is -1, this * signals that the iterator should be run to the end. * * @param index Position to advance (or retreat) to, with * 0 requesting the reset ("fresh") position and -1 (or indeed * any out-of-bounds value) requesting the final position. * @throws RuntimeException thrown if this NodeSetDTM is not * one of the types which supports indexing/counting. */ public void runTo(int index) { if (!m_cacheNodes) throw new RuntimeException( XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_INDEX, null)); //"This NodeSetDTM can not do indexing or counting functions!"); if ((index >= 0) && (m_next < m_firstFree)) m_next = index; else m_next = m_firstFree - 1; } /** * Returns the <code>index</code>th item in the collection. If * <code>index</code> is greater than or equal to the number of nodes in * the list, this returns <code>null</code>. * * TODO: What happens if index is out of range? * * @param index Index into the collection. * @return The node at the <code>index</code>th position in the * <code>NodeList</code>, or <code>null</code> if that is not a valid * index. 
*/ public int item(int index) { runTo(index); return this.elementAt(index); } /** * The number of nodes in the list. The range of valid child node indices is * 0 to <code>length-1</code> inclusive. Note that this operation requires * finding all the matching nodes, which may defeat attempts to defer * that work. * * @return integer indicating how many nodes are represented by this list. */ public int getLength() { runTo(-1); return this.size(); } /** * Add a node to the NodeSetDTM. Not all types of NodeSetDTMs support this * operation * * @param n Node to be added * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void addNode(int n) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); this.addElement(n); } /** * Insert a node at a given position. * * @param n Node to be added * @param pos Offset at which the node is to be inserted, * with 0 being the first position. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void insertNode(int n, int pos) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); insertElementAt(n, pos); } /** * Remove a node. * * @param n Node to be added * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void removeNode(int n) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); this.removeElement(n); } // %TBD% // /** // * Copy NodeList members into this nodelist, adding in // * document order. If a node is null, don't add it. // * // * @param nodelist List of nodes which should now be referenced by // * this NodeSetDTM. 
// * @throws RuntimeException thrown if this NodeSetDTM is not of // * a mutable type. // */ // public void addNodes(NodeList nodelist) // { // // if (!m_mutable) // throw new RuntimeException("This NodeSetDTM is not mutable!"); // // if (null != nodelist) // defensive to fix a bug that Sanjiva reported. // { // int nChildren = nodelist.getLength(); // // for (int i = 0; i < nChildren; i++) // { // int obj = nodelist.item(i); // // if (null != obj) // { // addElement(obj); // } // } // } // // // checkDups(); // } // %TBD% // /** // * <p>Copy NodeList members into this nodelist, adding in // * document order. Only genuine node references will be copied; // * nulls appearing in the source NodeSetDTM will // * not be added to this one. </p> // * // * <p> In case you're wondering why this function is needed: NodeSetDTM // * implements both DTMIterator and NodeList. If this method isn't // * provided, Java can't decide which of those to use when addNodes() // * is invoked. Providing the more-explicit match avoids that // * ambiguity.)</p> // * // * @param ns NodeSetDTM whose members should be merged into this NodeSetDTM. // * @throws RuntimeException thrown if this NodeSetDTM is not of // * a mutable type. // */ // public void addNodes(NodeSetDTM ns) // { // // if (!m_mutable) // throw new RuntimeException("This NodeSetDTM is not mutable!"); // // addNodes((DTMIterator) ns); // } /** * Copy NodeList members into this nodelist, adding in * document order. Null references are not added. * * @param iterator DTMIterator which yields the nodes to be added. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void addNodes(DTMIterator iterator) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); if (null != iterator) // defensive to fix a bug that Sanjiva reported. 
{ int obj; while (DTM.NULL != (obj = iterator.nextNode())) { addElement(obj); } } // checkDups(); } // %TBD% // /** // * Copy NodeList members into this nodelist, adding in // * document order. If a node is null, don't add it. // * // * @param nodelist List of nodes to be added // * @param support The XPath runtime context. // * @throws RuntimeException thrown if this NodeSetDTM is not of // * a mutable type. // */ // public void addNodesInDocOrder(NodeList nodelist, XPathContext support) // { // // if (!m_mutable) // throw new RuntimeException("This NodeSetDTM is not mutable!"); // // int nChildren = nodelist.getLength(); // // for (int i = 0; i < nChildren; i++) // { // int node = nodelist.item(i); // // if (null != node) // { // addNodeInDocOrder(node, support); // } // } // } /** * Copy NodeList members into this nodelist, adding in * document order. If a node is null, don't add it. * * @param iterator DTMIterator which yields the nodes to be added. * @param support The XPath runtime context. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void addNodesInDocOrder(DTMIterator iterator, XPathContext support) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); int node; while (DTM.NULL != (node = iterator.nextNode())) { addNodeInDocOrder(node, support); } } // %TBD% // /** // * Add the node list to this node set in document order. // * // * @param start index. // * @param end index. // * @param testIndex index. // * @param nodelist The nodelist to add. // * @param support The XPath runtime context. // * // * @return false always. // * @throws RuntimeException thrown if this NodeSetDTM is not of // * a mutable type. 
// */ // private boolean addNodesInDocOrder(int start, int end, int testIndex, // NodeList nodelist, XPathContext support) // { // // if (!m_mutable) // throw new RuntimeException("This NodeSetDTM is not mutable!"); // // boolean foundit = false; // int i; // int node = nodelist.item(testIndex); // // for (i = end; i >= start; i--) // { // int child = elementAt(i); // // if (child == node) // { // i = -2; // Duplicate, suppress insert // // break; // } // // if (!support.getDOMHelper().isNodeAfter(node, child)) // { // insertElementAt(node, i + 1); // // testIndex--; // // if (testIndex > 0) // { // boolean foundPrev = addNodesInDocOrder(0, i, testIndex, nodelist, // support); // // if (!foundPrev) // { // addNodesInDocOrder(i, size() - 1, testIndex, nodelist, support); // } // } // // break; // } // } // // if (i == -1) // { // insertElementAt(node, 0); // } // // return foundit; // } /** * Add the node into a vector of nodes where it should occur in * document order. * @param node The node to be added. * @param test true if we should test for doc order * @param support The XPath runtime context. * @return insertIndex. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public int addNodeInDocOrder(int node, boolean test, XPathContext support) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); int insertIndex = -1; if (test) { // This needs to do a binary search, but a binary search // is somewhat tough because the sequence test involves // two nodes. 
int size = size(), i; for (i = size - 1; i >= 0; i--) { int child = elementAt(i); if (child == node) { i = -2; // Duplicate, suppress insert break; } DTM dtm = support.getDTM(node); if (!dtm.isNodeAfter(node, child)) { break; } } if (i != -2) { insertIndex = i + 1; insertElementAt(node, insertIndex); } } else { insertIndex = this.size(); boolean foundit = false; for (int i = 0; i < insertIndex; i++) { if (i == node) { foundit = true; break; } } if (!foundit) addElement(node); } // checkDups(); return insertIndex; } // end addNodeInDocOrder(Vector v, Object obj) /** * Add the node into a vector of nodes where it should occur in * document order. * @param node The node to be added. * @param support The XPath runtime context. * * @return The index where it was inserted. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public int addNodeInDocOrder(int node, XPathContext support) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); return addNodeInDocOrder(node, true, support); } // end addNodeInDocOrder(Vector v, Object obj) /** * Get the length of the list. * * @return The size of this node set. */ public int size() { return super.size(); } /** * Append a Node onto the vector. * * @param value The node to be added. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void addElement(int value) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.addElement(value); } /** * Inserts the specified node in this vector at the specified index. * Each component in this vector with an index greater or equal to * the specified index is shifted upward to have an index one greater * than the value it had previously. * * @param value The node to be inserted. 
* @param at The index where the insert should occur. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void insertElementAt(int value, int at) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.insertElementAt(value, at); } /** * Append the nodes to the list. * * @param nodes The nodes to be appended to this node set. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void appendNodes(NodeVector nodes) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.appendNodes(nodes); } /** * Inserts the specified node in this vector at the specified index. * Each component in this vector with an index greater or equal to * the specified index is shifted upward to have an index one greater * than the value it had previously. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void removeAllElements() { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.removeAllElements(); } /** * Removes the first occurrence of the argument from this vector. * If the object is found in this vector, each component in the vector * with an index greater or equal to the object's index is shifted * downward to have an index one smaller than the value it had * previously. * * @param s The node to be removed. * * @return True if the node was successfully removed * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. 
*/ public boolean removeElement(int s) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); return super.removeElement(s); } /** * Deletes the component at the specified index. Each component in * this vector with an index greater or equal to the specified * index is shifted downward to have an index one smaller than * the value it had previously. * * @param i The index of the node to be removed. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void removeElementAt(int i) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.removeElementAt(i); } /** * Sets the component at the specified index of this vector to be the * specified object. The previous component at that position is discarded. * * The index must be a value greater than or equal to 0 and less * than the current size of the vector. * * @param node The node to be set. * @param index The index of the node to be replaced. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void setElementAt(int node, int index) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.setElementAt(node, index); } /** * Same as setElementAt. * * @param node The node to be set. * @param index The index of the node to be replaced. * @throws RuntimeException thrown if this NodeSetDTM is not of * a mutable type. */ public void setItem(int node, int index) { if (!m_mutable) throw new RuntimeException(XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_NOT_MUTABLE, null)); //"This NodeSetDTM is not mutable!"); super.setElementAt(node, index); } /** * Get the nth element. 
* * @param i The index of the requested node. * * @return Node at specified index. */ public int elementAt(int i) { runTo(i); return super.elementAt(i); } /** * Tell if the table contains the given node. * * @param s Node to look for * * @return True if the given node was found. */ public boolean contains(int s) { runTo(-1); return super.contains(s); } /** * Searches for the first occurence of the given argument, * beginning the search at index, and testing for equality * using the equals method. * * @param elem Node to look for * @param index Index of where to start the search * @return the index of the first occurrence of the object * argument in this vector at position index or later in the * vector; returns -1 if the object is not found. */ public int indexOf(int elem, int index) { runTo(-1); return super.indexOf(elem, index); } /** * Searches for the first occurence of the given argument, * beginning the search at index, and testing for equality * using the equals method. * * @param elem Node to look for * @return the index of the first occurrence of the object * argument in this vector at position index or later in the * vector; returns -1 if the object is not found. */ public int indexOf(int elem) { runTo(-1); return super.indexOf(elem); } /** If this node is being used as an iterator, the next index that nextNode() * will return. */ transient protected int m_next = 0; /** * Get the current position, which is one less than * the next nextNode() call will retrieve. i.e. if * you call getCurrentPos() and the return is 0, the next * fetch will take place at index 1. * * @return The the current position index. */ public int getCurrentPos() { return m_next; } /** * Set the current position in the node set. * @param i Must be a valid index. * @throws RuntimeException thrown if this NodeSetDTM is not of * a cached type, and thus doesn't permit indexed access. 
*/ public void setCurrentPos(int i) { if (!m_cacheNodes) throw new RuntimeException( XSLMessages.createXPATHMessage(XPATHErrorResources.ER_NODESETDTM_CANNOT_INDEX, null)); //"This NodeSetDTM can not do indexing or counting functions!"); m_next = i; } /** * Return the last fetched node. Needed to support the UnionPathIterator. * * @return the last fetched node. * @throws RuntimeException thrown if this NodeSetDTM is not of * a cached type, and thus doesn't permit indexed access. */ public int getCurrentNode() { if (!m_cacheNodes) throw new RuntimeException( "This NodeSetDTM can not do indexing or counting functions!"); int saved = m_next; // because nextNode always increments // But watch out for copy29, where the root iterator didn't // have nextNode called on it. int current = (m_next > 0) ? m_next-1 : m_next; int n = (current < m_firstFree) ? elementAt(current) : DTM.NULL; m_next = saved; // HACK: I think this is a bit of a hack. -sb return n; } /** True if this list can be mutated. */ transient protected boolean m_mutable = true; /** True if this list is cached. * @serial */ transient protected boolean m_cacheNodes = true; /** The root of the iteration, if available. */ protected int m_root = DTM.NULL; /** * Get whether or not this is a cached node set. * * * @return True if this list is cached. */ public boolean getShouldCacheNodes() { return m_cacheNodes; } /** * If setShouldCacheNodes(true) is called, then nodes will * be cached. They are not cached by default. This switch must * be set before the first call to nextNode is made, to ensure * that all nodes are cached. * * @param b true if this node set should be cached. * @throws RuntimeException thrown if an attempt is made to * request caching after we've already begun stepping through the * nodes in this set. 
*/ public void setShouldCacheNodes(boolean b) { if (!isFresh()) throw new RuntimeException( XSLMessages.createXPATHMessage(XPATHErrorResources.ER_CANNOT_CALL_SETSHOULDCACHENODE, null)); //"Can not call setShouldCacheNodes after nextNode has been called!"); m_cacheNodes = b; m_mutable = true; } /** * Tells if this iterator can have nodes added to it or set via * the <code>setItem(int node, int index)</code> method. * * @return True if the nodelist can be mutated. */ public boolean isMutable() { return m_mutable; } transient private int m_last = 0; public int getLast() { return m_last; } public void setLast(int last) { m_last = last; } /** * Returns true if all the nodes in the iteration well be returned in document * order. * * @return true as a default. */ public boolean isDocOrdered() { return true; } /** * Returns the axis being iterated, if it is known. * * @return Axis.CHILD, etc., or -1 if the axis is not known or is of multiple * types. */ public int getAxis() { return -1; } }
googleapis/google-cloud-java
35,326
java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/UpdateSessionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1/conversational_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1; /** * * * <pre> * Request for UpdateSession method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.UpdateSessionRequest} */ public final class UpdateSessionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.UpdateSessionRequest) UpdateSessionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateSessionRequest.newBuilder() to construct. 
private UpdateSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateSessionRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateSessionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1_UpdateSessionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1_UpdateSessionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.UpdateSessionRequest.class, com.google.cloud.discoveryengine.v1.UpdateSessionRequest.Builder.class); } private int bitField0_; public static final int SESSION_FIELD_NUMBER = 1; private com.google.cloud.discoveryengine.v1.Session session_; /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the session field is set. */ @java.lang.Override public boolean hasSession() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The session. */ @java.lang.Override public com.google.cloud.discoveryengine.v1.Session getSession() { return session_ == null ? com.google.cloud.discoveryengine.v1.Session.getDefaultInstance() : session_; } /** * * * <pre> * Required. The Session to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1.SessionOrBuilder getSessionOrBuilder() { return session_ == null ? com.google.cloud.discoveryengine.v1.Session.getDefaultInstance() : session_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1.Session] to update. The following * are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1.Session] to update. The following * are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1.Session] to update. The following * are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSession()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSession()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1.UpdateSessionRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1.UpdateSessionRequest other = (com.google.cloud.discoveryengine.v1.UpdateSessionRequest) obj; if (hasSession() != other.hasSession()) return false; if (hasSession()) { if (!getSession().equals(other.getSession())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSession()) 
{ hash = (37 * hash) + SESSION_FIELD_NUMBER; hash = (53 * hash) + getSession().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static 
com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1.UpdateSessionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateSession method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1.UpdateSessionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.UpdateSessionRequest) com.google.cloud.discoveryengine.v1.UpdateSessionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1_UpdateSessionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1_UpdateSessionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1.UpdateSessionRequest.class, com.google.cloud.discoveryengine.v1.UpdateSessionRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1.UpdateSessionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSessionFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; session_ = null; if (sessionBuilder_ != null) { sessionBuilder_.dispose(); sessionBuilder_ = null; } updateMask_ = null; 
if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1_UpdateSessionRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1.UpdateSessionRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1.UpdateSessionRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1.UpdateSessionRequest build() { com.google.cloud.discoveryengine.v1.UpdateSessionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1.UpdateSessionRequest buildPartial() { com.google.cloud.discoveryengine.v1.UpdateSessionRequest result = new com.google.cloud.discoveryengine.v1.UpdateSessionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.discoveryengine.v1.UpdateSessionRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.session_ = sessionBuilder_ == null ? session_ : sessionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1.UpdateSessionRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1.UpdateSessionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1.UpdateSessionRequest other) { if (other == com.google.cloud.discoveryengine.v1.UpdateSessionRequest.getDefaultInstance()) return this; if (other.hasSession()) { mergeSession(other.getSession()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw 
// NOTE(review): generated protobuf code reformatted from a collapsed single-line layout.
// Code tokens are unchanged; only comments/formatting were touched.
// (continuation of Builder.mergeFrom(CodedInputStream): the `throw` belongs to the
// null-extensionRegistry guard that starts before this chunk)
            new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // field 1: session (message)
                input.readMessage(getSessionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // field 2: update_mask (message)
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 => session set, 0x2 => update_mask set.
    private int bitField0_;

    private com.google.cloud.discoveryengine.v1.Session session_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Session,
            com.google.cloud.discoveryengine.v1.Session.Builder,
            com.google.cloud.discoveryengine.v1.SessionOrBuilder>
        sessionBuilder_;

    /**
     * Required. The Session to update.
     *
     * <code>.google.cloud.discoveryengine.v1.Session session = 1 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return Whether the session field is set.
     */
    public boolean hasSession() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * Required. The Session to update.
     *
     * @return The session, or the default instance if unset.
     */
    public com.google.cloud.discoveryengine.v1.Session getSession() {
      if (sessionBuilder_ == null) {
        return session_ == null
            ? com.google.cloud.discoveryengine.v1.Session.getDefaultInstance()
            : session_;
      } else {
        return sessionBuilder_.getMessage();
      }
    }

    /** Sets the required Session to update; rejects {@code null}. */
    public Builder setSession(com.google.cloud.discoveryengine.v1.Session value) {
      if (sessionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        session_ = value;
      } else {
        sessionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Sets the required Session to update from a builder. */
    public Builder setSession(com.google.cloud.discoveryengine.v1.Session.Builder builderForValue) {
      if (sessionBuilder_ == null) {
        session_ = builderForValue.build();
      } else {
        sessionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Merges {@code value} into the current session (protobuf message-merge semantics). */
    public Builder mergeSession(com.google.cloud.discoveryengine.v1.Session value) {
      if (sessionBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && session_ != null
            && session_ != com.google.cloud.discoveryengine.v1.Session.getDefaultInstance()) {
          getSessionBuilder().mergeFrom(value);
        } else {
          session_ = value;
        }
      } else {
        sessionBuilder_.mergeFrom(value);
      }
      if (session_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /** Clears the session field and releases its nested builder, if any. */
    public Builder clearSession() {
      bitField0_ = (bitField0_ & ~0x00000001);
      session_ = null;
      if (sessionBuilder_ != null) {
        sessionBuilder_.dispose();
        sessionBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable builder for the session field, marking the field set. */
    public com.google.cloud.discoveryengine.v1.Session.Builder getSessionBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getSessionFieldBuilder().getBuilder();
    }

    /** Read-only view of the session field without forcing nested-builder creation. */
    public com.google.cloud.discoveryengine.v1.SessionOrBuilder getSessionOrBuilder() {
      if (sessionBuilder_ != null) {
        return sessionBuilder_.getMessageOrBuilder();
      } else {
        return session_ == null
            ? com.google.cloud.discoveryengine.v1.Session.getDefaultInstance()
            : session_;
      }
    }

    /** Lazily creates the nested field builder for session; ownership moves to the builder. */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.discoveryengine.v1.Session,
            com.google.cloud.discoveryengine.v1.Session.Builder,
            com.google.cloud.discoveryengine.v1.SessionOrBuilder>
        getSessionFieldBuilder() {
      if (sessionBuilder_ == null) {
        sessionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.discoveryengine.v1.Session,
                com.google.cloud.discoveryengine.v1.Session.Builder,
                com.google.cloud.discoveryengine.v1.SessionOrBuilder>(
                getSession(), getParentForChildren(), isClean());
        session_ = null;
      }
      return sessionBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;

    /**
     * Indicates which fields in the provided
     * [Session][google.cloud.discoveryengine.v1.Session] to update.
     * [Session.name][google.cloud.discoveryengine.v1.Session.name] is NOT supported.
     * If not set or empty, all supported fields are updated.
     *
     * <code>.google.protobuf.FieldMask update_mask = 2;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /** @return The updateMask, or the default instance if unset. */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }

    /** Sets update_mask; rejects {@code null}. */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Sets update_mask from a builder. */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Merges {@code value} into update_mask (protobuf message-merge semantics). */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /** Clears update_mask and releases its nested builder, if any. */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable builder for update_mask, marking the field set. */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }

    /** Read-only view of update_mask without forcing nested-builder creation. */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }

    /** Lazily creates the nested field builder for update_mask. */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.UpdateSessionRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.UpdateSessionRequest)
  private static final com.google.cloud.discoveryengine.v1.UpdateSessionRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.UpdateSessionRequest();
  }

  public static com.google.cloud.discoveryengine.v1.UpdateSessionRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stream parser; on failure, attaches the partially parsed message to the thrown exception.
  private static final com.google.protobuf.Parser<UpdateSessionRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateSessionRequest>() {
        @java.lang.Override
        public UpdateSessionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateSessionRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateSessionRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.UpdateSessionRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------
// NOTE(review): concatenation artifact — the following content belongs to a
// different source file and does not compile together with the class above
// (two license headers / package declarations in one compilation unit).
//   repo: googleapis/google-cloud-java (size: 35,359 bytes)
//   path: java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/BargeInConfig.java
// ---------------------------------------------------------------------------
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/audio_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * Configuration of the barge-in behavior. Barge-in instructs the API to return * a detected utterance at a proper time while the client is playing back the * response audio from a previous request. When the client sees the * utterance, it should stop the playback and immediately get ready for * receiving the responses for the current request. * * The barge-in handling requires the client to start streaming audio input * as soon as it starts playing back the audio from the previous response. The * playback is modeled into two phases: * * * No barge-in phase: which goes first and during which speech detection * should not be carried out. * * * Barge-in phase: which follows the no barge-in phase and during which * the API starts speech detection and may inform the client that an utterance * has been detected. Note that no-speech event is not expected in this * phase. * * The client provides this configuration in terms of the durations of those * two phases. The durations are measured in terms of the audio length from the * start of the input audio. * * No-speech event is a response with END_OF_UTTERANCE without any transcript * following up. 
* </pre>
 *
 * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.BargeInConfig}
 */
public final class BargeInConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.BargeInConfig)
    BargeInConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use BargeInConfig.newBuilder() to construct.
  private BargeInConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private BargeInConfig() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BargeInConfig();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_BargeInConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
        .internal_static_google_cloud_dialogflow_cx_v3beta1_BargeInConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.class,
            com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.Builder.class);
  }

  // Presence bits: 0x1 => no_barge_in_duration set, 0x2 => total_duration set.
  private int bitField0_;

  public static final int NO_BARGE_IN_DURATION_FIELD_NUMBER = 1;
  private com.google.protobuf.Duration noBargeInDuration_;

  /**
   * Duration that is not eligible for barge-in at the beginning of the input audio.
   *
   * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
   *
   * @return Whether the noBargeInDuration field is set.
   */
  @java.lang.Override
  public boolean hasNoBargeInDuration() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /** @return The noBargeInDuration, or the default instance if unset. */
  @java.lang.Override
  public com.google.protobuf.Duration getNoBargeInDuration() {
    return noBargeInDuration_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : noBargeInDuration_;
  }

  /** Read-only view of no_barge_in_duration. */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() {
    return noBargeInDuration_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : noBargeInDuration_;
  }

  public static final int TOTAL_DURATION_FIELD_NUMBER = 2;
  private com.google.protobuf.Duration totalDuration_;

  /**
   * Total duration for the playback at the beginning of the input audio.
   *
   * <code>.google.protobuf.Duration total_duration = 2;</code>
   *
   * @return Whether the totalDuration field is set.
   */
  @java.lang.Override
  public boolean hasTotalDuration() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /** @return The totalDuration, or the default instance if unset. */
  @java.lang.Override
  public com.google.protobuf.Duration getTotalDuration() {
    return totalDuration_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : totalDuration_;
  }

  /** Read-only view of total_duration. */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() {
    return totalDuration_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : totalDuration_;
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getNoBargeInDuration());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getTotalDuration());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getNoBargeInDuration());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTotalDuration());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig other =
        (com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig) obj;

    if (hasNoBargeInDuration() != other.hasNoBargeInDuration()) return false;
    if (hasNoBargeInDuration()) {
      if (!getNoBargeInDuration().equals(other.getNoBargeInDuration())) return false;
    }
    if (hasTotalDuration() != other.hasTotalDuration()) return false;
    if (hasTotalDuration()) {
      if (!getTotalDuration().equals(other.getTotalDuration())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasNoBargeInDuration()) {
      hash = (37 * hash) + NO_BARGE_IN_DURATION_FIELD_NUMBER;
      hash = (53 * hash) + getNoBargeInDuration().hashCode();
    }
    if (hasTotalDuration()) {
      hash = (37 * hash) + TOTAL_DURATION_FIELD_NUMBER;
      hash = (53 * hash) + getTotalDuration().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   * <pre>
   * Configuration of the barge-in behavior. Barge-in instructs the API to return
   * a detected utterance at a proper time while the client is playing back the
   * response audio from a previous request; the client should then stop playback
   * and get ready to receive responses for the current request. The client must
   * stream audio input as soon as it starts playing back the previous response.
   * Playback is modeled as a no-barge-in phase (no speech detection) followed by
   * a barge-in phase (speech detection active; no-speech events are not expected),
   * both expressed as durations from the start of the input audio.
   * No-speech event is a response with END_OF_UTTERANCE without any transcript
   * following up.
* </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.BargeInConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.BargeInConfig)
      com.google.cloud.dialogflow.cx.v3beta1.BargeInConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_BargeInConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_BargeInConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.class,
              com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getNoBargeInDurationFieldBuilder();
        getTotalDurationFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      noBargeInDuration_ = null;
      if (noBargeInDurationBuilder_ != null) {
        noBargeInDurationBuilder_.dispose();
        noBargeInDurationBuilder_ = null;
      }
      totalDuration_ = null;
      if (totalDurationBuilder_ != null) {
        totalDurationBuilder_.dispose();
        totalDurationBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.AudioConfigProto
          .internal_static_google_cloud_dialogflow_cx_v3beta1_BargeInConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig build() {
      com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig buildPartial() {
      com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig result =
          new com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (per bitField0_) from the builder into the built message.
    private void buildPartial0(com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.noBargeInDuration_ =
            noBargeInDurationBuilder_ == null
                ? noBargeInDuration_
                : noBargeInDurationBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.totalDuration_ =
            totalDurationBuilder_ == null ? totalDuration_ : totalDurationBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig) {
        return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig other) {
      if (other == com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig.getDefaultInstance())
        return this;
      if (other.hasNoBargeInDuration()) {
        mergeNoBargeInDuration(other.getNoBargeInDuration());
      }
      if (other.hasTotalDuration()) {
        mergeTotalDuration(other.getTotalDuration());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // field 1: no_barge_in_duration (message)
                input.readMessage(
                    getNoBargeInDurationFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // field 2: total_duration (message)
                input.readMessage(getTotalDurationFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 => no_barge_in_duration set, 0x2 => total_duration set.
    private int bitField0_;

    private com.google.protobuf.Duration noBargeInDuration_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        noBargeInDurationBuilder_;

    /**
     * Duration that is not eligible for barge-in at the beginning of the input audio.
     *
     * <code>.google.protobuf.Duration no_barge_in_duration = 1;</code>
     *
     * @return Whether the noBargeInDuration field is set.
     */
    public boolean hasNoBargeInDuration() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /** @return The noBargeInDuration, or the default instance if unset. */
    public com.google.protobuf.Duration getNoBargeInDuration() {
      if (noBargeInDurationBuilder_ == null) {
        return noBargeInDuration_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : noBargeInDuration_;
      } else {
        return noBargeInDurationBuilder_.getMessage();
      }
    }

    /** Sets no_barge_in_duration; rejects {@code null}. */
    public Builder setNoBargeInDuration(com.google.protobuf.Duration value) {
      if (noBargeInDurationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        noBargeInDuration_ = value;
      } else {
        noBargeInDurationBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Sets no_barge_in_duration from a builder. */
    public Builder setNoBargeInDuration(com.google.protobuf.Duration.Builder builderForValue) {
      if (noBargeInDurationBuilder_ == null) {
        noBargeInDuration_ = builderForValue.build();
      } else {
        noBargeInDurationBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Merges {@code value} into no_barge_in_duration (protobuf message-merge semantics). */
    public Builder mergeNoBargeInDuration(com.google.protobuf.Duration value) {
      if (noBargeInDurationBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && noBargeInDuration_ != null
            && noBargeInDuration_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getNoBargeInDurationBuilder().mergeFrom(value);
        } else {
          noBargeInDuration_ = value;
        }
      } else {
        noBargeInDurationBuilder_.mergeFrom(value);
      }
      if (noBargeInDuration_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /** Clears no_barge_in_duration and releases its nested builder, if any. */
    public Builder clearNoBargeInDuration() {
      bitField0_ = (bitField0_ & ~0x00000001);
      noBargeInDuration_ = null;
      if (noBargeInDurationBuilder_ != null) {
        noBargeInDurationBuilder_.dispose();
        noBargeInDurationBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable builder for no_barge_in_duration, marking the field set. */
    public com.google.protobuf.Duration.Builder getNoBargeInDurationBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getNoBargeInDurationFieldBuilder().getBuilder();
    }

    /** Read-only view of no_barge_in_duration without forcing nested-builder creation. */
    public com.google.protobuf.DurationOrBuilder getNoBargeInDurationOrBuilder() {
      if (noBargeInDurationBuilder_ != null) {
        return noBargeInDurationBuilder_.getMessageOrBuilder();
      } else {
        return noBargeInDuration_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : noBargeInDuration_;
      }
    }

    /** Lazily creates the nested field builder for no_barge_in_duration. */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getNoBargeInDurationFieldBuilder() {
      if (noBargeInDurationBuilder_ == null) {
        noBargeInDurationBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getNoBargeInDuration(), getParentForChildren(), isClean());
        noBargeInDuration_ = null;
      }
      return noBargeInDurationBuilder_;
    }

    private com.google.protobuf.Duration totalDuration_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        totalDurationBuilder_;

    /**
     * Total duration for the playback at the beginning of the input audio.
     *
     * <code>.google.protobuf.Duration total_duration = 2;</code>
     *
     * @return Whether the totalDuration field is set.
     */
    public boolean hasTotalDuration() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /** @return The totalDuration, or the default instance if unset. */
    public com.google.protobuf.Duration getTotalDuration() {
      if (totalDurationBuilder_ == null) {
        return totalDuration_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : totalDuration_;
      } else {
        return totalDurationBuilder_.getMessage();
      }
    }

    /** Sets total_duration; rejects {@code null}. */
    public Builder setTotalDuration(com.google.protobuf.Duration value) {
      if (totalDurationBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        totalDuration_ = value;
      } else {
        totalDurationBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Sets total_duration from a builder. */
    public Builder setTotalDuration(com.google.protobuf.Duration.Builder builderForValue) {
      if (totalDurationBuilder_ == null) {
        totalDuration_ = builderForValue.build();
      } else {
        totalDurationBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Merges {@code value} into total_duration (protobuf message-merge semantics). */
    public Builder mergeTotalDuration(com.google.protobuf.Duration value) {
      if (totalDurationBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && totalDuration_ != null
            && totalDuration_ != com.google.protobuf.Duration.getDefaultInstance()) {
          getTotalDurationBuilder().mergeFrom(value);
        } else {
          totalDuration_ = value;
        }
      } else {
        totalDurationBuilder_.mergeFrom(value);
      }
      if (totalDuration_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /** Clears total_duration and releases its nested builder, if any. */
    public Builder clearTotalDuration() {
      bitField0_ = (bitField0_ & ~0x00000002);
      totalDuration_ = null;
      if (totalDurationBuilder_ != null) {
        totalDurationBuilder_.dispose();
        totalDurationBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     * <pre>
     * Total duration for the playback at the beginning of the input audio.
* </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public com.google.protobuf.Duration.Builder getTotalDurationBuilder() { bitField0_ |= 0x00000002; onChanged(); return getTotalDurationFieldBuilder().getBuilder(); } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ public com.google.protobuf.DurationOrBuilder getTotalDurationOrBuilder() { if (totalDurationBuilder_ != null) { return totalDurationBuilder_.getMessageOrBuilder(); } else { return totalDuration_ == null ? com.google.protobuf.Duration.getDefaultInstance() : totalDuration_; } } /** * * * <pre> * Total duration for the playback at the beginning of the input audio. * </pre> * * <code>.google.protobuf.Duration total_duration = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder> getTotalDurationFieldBuilder() { if (totalDurationBuilder_ == null) { totalDurationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Duration, com.google.protobuf.Duration.Builder, com.google.protobuf.DurationOrBuilder>( getTotalDuration(), getParentForChildren(), isClean()); totalDuration_ = null; } return totalDurationBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.BargeInConfig) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.BargeInConfig) private static final com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = 
new com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig(); } public static com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BargeInConfig> PARSER = new com.google.protobuf.AbstractParser<BargeInConfig>() { @java.lang.Override public BargeInConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BargeInConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BargeInConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.BargeInConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hbase
35,489
hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.client; import java.io.IOException; import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.NoSuchElementException; import java.util.TreeMap; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.ExtendedCell; import org.apache.hadoop.hbase.ExtendedCellScannable; import org.apache.hadoop.hbase.ExtendedCellScanner; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; /** * Single row result of a {@link Get} or {@link Scan} query. * <p> * This class is <b>NOT THREAD SAFE</b>. 
* <p> * Convenience methods are available that return various {@link Map} structures and values directly. * <p> * To get a complete mapping of all cells in the Result, which can include multiple families and * multiple versions, use {@link #getMap()}. * <p> * To get a mapping of each family to its columns (qualifiers and values), including only the latest * version of each, use {@link #getNoVersionMap()}. To get a mapping of qualifiers to latest values * for an individual family use {@link #getFamilyMap(byte[])}. * <p> * To get the latest value for a specific family and qualifier use * {@link #getValue(byte[], byte[])}. A Result is backed by an array of {@link Cell} objects, each * representing an HBase cell defined by the row, family, qualifier, timestamp, and value. * <p> * The underlying {@link Cell} objects can be accessed through the method {@link #listCells()}. This * will create a List from the internal Cell []. Better is to exploit the fact that a new Result * instance is a primed {@link CellScanner}; just call {@link #advance()} and {@link #current()} to * iterate over Cells as you would any {@link CellScanner}. Call {@link #cellScanner()} to reset * should you need to iterate the same Result over again ({@link CellScanner}s are one-shot). If you * need to overwrite a Result with another Result instance -- as in the old 'mapred' RecordReader * next invocations -- then create an empty Result with the null constructor and in then use * {@link #copyFrom(Result)} */ @InterfaceAudience.Public public class Result implements ExtendedCellScannable, ExtendedCellScanner { private ExtendedCell[] cells; private Boolean exists; // if the query was just to check existence. private boolean stale = false; /** * See {@link #mayHaveMoreCellsInRow()}. */ private boolean mayHaveMoreCellsInRow = false; // We're not using java serialization. Transient here is just a marker to say // that this is where we cache row if we're ever asked for it. 
private transient byte[] row = null; // Ditto for familyMap. It can be composed on fly from passed in kvs. private transient NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyMap = null; private static ThreadLocal<byte[]> localBuffer = new ThreadLocal<>(); private static final int PAD_WIDTH = 128; public static final Result EMPTY_RESULT = new Result(true); private final static int INITIAL_CELLSCANNER_INDEX = -1; /** * Index for where we are when Result is acting as a {@link CellScanner}. */ private int cellScannerIndex = INITIAL_CELLSCANNER_INDEX; private RegionLoadStats stats; private QueryMetrics metrics = null; private final boolean readonly; private Cursor cursor = null; /** * Creates an empty Result w/ no KeyValue payload; returns null if you call {@link #rawCells()}. * Use this to represent no results if {@code null} won't do or in old 'mapred' as opposed to * 'mapreduce' package MapReduce where you need to overwrite a Result instance with a * {@link #copyFrom(Result)} call. */ public Result() { this(false); } /** * Allows to construct special purpose immutable Result objects, such as EMPTY_RESULT. * @param readonly whether this Result instance is readonly */ private Result(boolean readonly) { this.readonly = readonly; } /** * Instantiate a Result with the specified List of KeyValues. <br> * <strong>Note:</strong> You must ensure that the keyvalues are already sorted. * @param cells List of cells */ public static Result create(List<? extends Cell> cells) { return create(cells, null); } public static Result create(List<? extends Cell> cells, Boolean exists) { return create(cells, exists, false); } public static Result create(List<? extends Cell> cells, Boolean exists, boolean stale) { return create(cells, exists, stale, false); } public static Result create(List<? 
extends Cell> cells, Boolean exists, boolean stale, boolean mayHaveMoreCellsInRow) { if (exists != null) { return new Result(null, exists, stale, mayHaveMoreCellsInRow); } return new Result(cells.toArray(new ExtendedCell[cells.size()]), null, stale, mayHaveMoreCellsInRow); } /** * Instantiate a Result with the specified array of KeyValues. <br> * <strong>Note:</strong> You must ensure that the keyvalues are already sorted. * @param cells array of cells */ public static Result create(Cell[] cells) { return create(cells, null, false); } public static Result create(Cell[] cells, Boolean exists, boolean stale) { return create(cells, exists, stale, false); } public static Result create(Cell[] cells, Boolean exists, boolean stale, boolean mayHaveMoreCellsInRow) { if (exists != null) { return new Result(null, exists, stale, mayHaveMoreCellsInRow); } ExtendedCell[] extendCells = cells instanceof ExtendedCell[] ? (ExtendedCell[]) cells : Arrays.copyOf(cells, cells.length, ExtendedCell[].class); return new Result(extendCells, null, stale, mayHaveMoreCellsInRow); } // prefer these below methods inside hbase to avoid casting or copying static Result create(ExtendedCell[] cells) { return create(cells, null, false); } static Result create(ExtendedCell[] cells, Boolean exists, boolean stale) { return create(cells, exists, stale, false); } static Result create(ExtendedCell[] cells, Boolean exists, boolean stale, boolean mayHaveMoreCellsInRow) { if (exists != null) { return new Result(null, exists, stale, mayHaveMoreCellsInRow); } return new Result(cells, null, stale, mayHaveMoreCellsInRow); } public static Result createCursorResult(Cursor cursor) { return new Result(cursor); } private Result(Cursor cursor) { this.cursor = cursor; this.readonly = false; } /** Private ctor. Use {@link #create(Cell[])}. 
*/ private Result(ExtendedCell[] cells, Boolean exists, boolean stale, boolean mayHaveMoreCellsInRow) { this.cells = cells; this.exists = exists; this.stale = stale; this.mayHaveMoreCellsInRow = mayHaveMoreCellsInRow; this.readonly = false; } /** * Method for retrieving the row key that corresponds to the row from which this Result was * created. */ public byte[] getRow() { if (this.row == null) { this.row = (this.cells == null || this.cells.length == 0) ? null : CellUtil.cloneRow(this.cells[0]); } return this.row; } /** * Return the array of Cells backing this Result instance. The array is sorted from smallest -&gt; * largest using the {@link CellComparator}. The array only contains what your Get or Scan * specifies and no more. For example if you request column "A" 1 version you will have at most 1 * Cell in the array. If you request column "A" with 2 version you will have at most 2 Cells, with * the first one being the newer timestamp and the second being the older timestamp (this is the * sort order defined by {@link CellComparator}). If columns don't exist, they won't be present in * the result. Therefore if you ask for 1 version all columns, it is safe to iterate over this * array and expect to see 1 Cell for each column and no more. This API is faster than using * getFamilyMap() and getMap() * @return array of Cells; can be null if nothing in the result */ public Cell[] rawCells() { return cells; } ExtendedCell[] rawExtendedCells() { return cells; } /** * Create a sorted list of the Cell's in this result. Since HBase 0.20.5 this is equivalent to * raw(). * @return sorted List of Cells; can be null if no cells in the result */ public List<Cell> listCells() { return isEmpty() ? null : Arrays.asList(rawCells()); } /** * Return the Cells for the specific column. The Cells are sorted in the {@link CellComparator} * order. That implies the first entry in the list is the most recent column. 
If the query (Scan * or Get) only requested 1 version the list will contain at most 1 entry. If the column did not * exist in the result set (either the column does not exist or the column was not selected in the * query) the list will be empty. Also see getColumnLatest which returns just a Cell * @param family the family * @return a list of Cells for this column or empty list if the column did not exist in the result * set */ public List<Cell> getColumnCells(byte[] family, byte[] qualifier) { List<Cell> result = new ArrayList<>(); Cell[] kvs = rawCells(); if (kvs == null || kvs.length == 0) { return result; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return result; // cant find it } for (int i = pos; i < kvs.length; i++) { if (CellUtil.matchingColumn(kvs[i], family, qualifier)) { result.add(kvs[i]); } else { break; } } return result; } private byte[] notNullBytes(final byte[] bytes) { if (bytes == null) { return HConstants.EMPTY_BYTE_ARRAY; } else { return bytes; } } private int binarySearch(final Cell[] kvs, final byte[] family, final byte[] qualifier) { byte[] familyNotNull = notNullBytes(family); byte[] qualifierNotNull = notNullBytes(qualifier); Cell searchTerm = PrivateCellUtil.createFirstOnRow(kvs[0].getRowArray(), kvs[0].getRowOffset(), kvs[0].getRowLength(), familyNotNull, 0, (byte) familyNotNull.length, qualifierNotNull, 0, qualifierNotNull.length); // pos === ( -(insertion point) - 1) int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.getInstance()); // never will exact match if (pos < 0) { pos = (pos + 1) * -1; // pos is now insertion point } if (pos == kvs.length) { return -1; // doesn't exist } return pos; } /** * Searches for the latest value for the specified column. 
* @param kvs the array to search * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return the index where the value was found, or -1 otherwise */ private int binarySearch(final Cell[] kvs, final byte[] family, final int foffset, final int flength, final byte[] qualifier, final int qoffset, final int qlength) { double keyValueSize = (double) KeyValue.getKeyValueDataStructureSize(kvs[0].getRowLength(), flength, qlength, 0); byte[] buffer = localBuffer.get(); if (buffer == null || keyValueSize > buffer.length) { // pad to the smallest multiple of the pad width buffer = new byte[(int) Math.ceil(keyValueSize / PAD_WIDTH) * PAD_WIDTH]; localBuffer.set(buffer); } Cell searchTerm = KeyValueUtil.createFirstOnRow(buffer, 0, kvs[0].getRowArray(), kvs[0].getRowOffset(), kvs[0].getRowLength(), family, foffset, flength, qualifier, qoffset, qlength); // pos === ( -(insertion point) - 1) int pos = Arrays.binarySearch(kvs, searchTerm, CellComparator.getInstance()); // never will exact match if (pos < 0) { pos = (pos + 1) * -1; // pos is now insertion point } if (pos == kvs.length) { return -1; // doesn't exist } return pos; } /** * The Cell for the most recent timestamp for a given column. * @return the Cell for the column, or null if no value exists in the row or none have been * selected in the query (Get/Scan) */ public Cell getColumnLatestCell(byte[] family, byte[] qualifier) { Cell[] kvs = rawCells(); // side effect possibly. if (kvs == null || kvs.length == 0) { return null; } int pos = binarySearch(kvs, family, qualifier); if (pos == -1) { return null; } if (CellUtil.matchingColumn(kvs[pos], family, qualifier)) { return kvs[pos]; } return null; } /** * The Cell for the most recent timestamp for a given column. 
* @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return the Cell for the column, or null if no value exists in the row or none have been * selected in the query (Get/Scan) */ public Cell getColumnLatestCell(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength) { Cell[] kvs = rawCells(); // side effect possibly. if (kvs == null || kvs.length == 0) { return null; } int pos = binarySearch(kvs, family, foffset, flength, qualifier, qoffset, qlength); if (pos == -1) { return null; } if ( PrivateCellUtil.matchingColumn(kvs[pos], family, foffset, flength, qualifier, qoffset, qlength) ) { return kvs[pos]; } return null; } /** * Get the latest version of the specified column. Note: this call clones the value content of the * hosting Cell. See {@link #getValueAsByteBuffer(byte[], byte[])}, etc., or {@link #listCells()} * if you would avoid the cloning. * @param family family name * @param qualifier column qualifier * @return value of latest version of column, null if none found */ public byte[] getValue(byte[] family, byte[] qualifier) { Cell kv = getColumnLatestCell(family, qualifier); if (kv == null) { return null; } return CellUtil.cloneValue(kv); } /** * Returns the value wrapped in a new <code>ByteBuffer</code>. * @param family family name * @param qualifier column qualifier * @return the latest version of the column, or <code>null</code> if none found */ public ByteBuffer getValueAsByteBuffer(byte[] family, byte[] qualifier) { Cell kv = getColumnLatestCell(family, 0, family.length, qualifier, 0, qualifier.length); if (kv == null) { return null; } return ByteBuffer.wrap(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()) .asReadOnlyBuffer(); } /** * Returns the value wrapped in a new <code>ByteBuffer</code>. 
* @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return the latest version of the column, or <code>null</code> if none found */ public ByteBuffer getValueAsByteBuffer(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); if (kv == null) { return null; } return ByteBuffer.wrap(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()) .asReadOnlyBuffer(); } /** * Loads the latest version of the specified column into the provided <code>ByteBuffer</code>. * <p> * Does not clear or flip the buffer. * @param family family name * @param qualifier column qualifier * @param dst the buffer where to write the value * @return <code>true</code> if a value was found, <code>false</code> otherwise * @throws BufferOverflowException there is insufficient space remaining in the buffer */ public boolean loadValue(byte[] family, byte[] qualifier, ByteBuffer dst) throws BufferOverflowException { return loadValue(family, 0, family.length, qualifier, 0, qualifier.length, dst); } /** * Loads the latest version of the specified column into the provided <code>ByteBuffer</code>. * <p> * Does not clear or flip the buffer. 
* @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @param dst the buffer where to write the value * @return <code>true</code> if a value was found, <code>false</code> otherwise * @throws BufferOverflowException there is insufficient space remaining in the buffer */ public boolean loadValue(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength, ByteBuffer dst) throws BufferOverflowException { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); if (kv == null) { return false; } dst.put(kv.getValueArray(), kv.getValueOffset(), kv.getValueLength()); return true; } /** * Checks if the specified column contains a non-empty value (not a zero-length byte array). * @param family family name * @param qualifier column qualifier * @return whether or not a latest value exists and is not empty */ public boolean containsNonEmptyColumn(byte[] family, byte[] qualifier) { return containsNonEmptyColumn(family, 0, family.length, qualifier, 0, qualifier.length); } /** * Checks if the specified column contains a non-empty value (not a zero-length byte array). * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return whether or not a latest value exists and is not empty */ public boolean containsNonEmptyColumn(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); return (kv != null) && (kv.getValueLength() > 0); } /** * Checks if the specified column contains an empty value (a zero-length byte array). 
* @param family family name * @param qualifier column qualifier * @return whether or not a latest value exists and is empty */ public boolean containsEmptyColumn(byte[] family, byte[] qualifier) { return containsEmptyColumn(family, 0, family.length, qualifier, 0, qualifier.length); } /** * Checks if the specified column contains an empty value (a zero-length byte array). * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return whether or not a latest value exists and is empty */ public boolean containsEmptyColumn(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength) { Cell kv = getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength); return (kv != null) && (kv.getValueLength() == 0); } /** * Checks for existence of a value for the specified column (empty or not). * @param family family name * @param qualifier column qualifier * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte[] family, byte[] qualifier) { Cell kv = getColumnLatestCell(family, qualifier); return kv != null; } /** * Checks for existence of a value for the specified column (empty or not). * @param family family name * @param foffset family offset * @param flength family length * @param qualifier column qualifier * @param qoffset qualifier offset * @param qlength qualifier length * @return true if at least one value exists in the result, false if not */ public boolean containsColumn(byte[] family, int foffset, int flength, byte[] qualifier, int qoffset, int qlength) { return getColumnLatestCell(family, foffset, flength, qualifier, qoffset, qlength) != null; } /** * Map of families to all versions of its qualifiers and values. 
* <p> * Returns a three level Map of the form: * <code>Map&amp;family,Map&lt;qualifier,Map&lt;timestamp,value&gt;&gt;&gt;</code> * <p> * Note: All other map returning methods make use of this map internally. * @return map from families to qualifiers to versions */ public NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> getMap() { if (this.familyMap != null) { return this.familyMap; } if (isEmpty()) { return null; } this.familyMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (Cell kv : this.cells) { byte[] family = CellUtil.cloneFamily(kv); NavigableMap<byte[], NavigableMap<Long, byte[]>> columnMap = familyMap.get(family); if (columnMap == null) { columnMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); familyMap.put(family, columnMap); } byte[] qualifier = CellUtil.cloneQualifier(kv); NavigableMap<Long, byte[]> versionMap = columnMap.get(qualifier); if (versionMap == null) { versionMap = new TreeMap<>(new Comparator<Long>() { @Override public int compare(Long l1, Long l2) { return l2.compareTo(l1); } }); columnMap.put(qualifier, versionMap); } Long timestamp = kv.getTimestamp(); byte[] value = CellUtil.cloneValue(kv); versionMap.put(timestamp, value); } return this.familyMap; } /** * Map of families to their most recent qualifiers and values. * <p> * Returns a two level Map of the form: <code>Map&amp;family,Map&lt;qualifier,value&gt;&gt;</code> * <p> * The most recent version of each qualifier will be used. 
* @return map from families to qualifiers and value */ public NavigableMap<byte[], NavigableMap<byte[], byte[]>> getNoVersionMap() { if (this.familyMap == null) { getMap(); } if (isEmpty()) { return null; } NavigableMap<byte[], NavigableMap<byte[], byte[]>> returnMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (Map.Entry<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> familyEntry : familyMap .entrySet()) { NavigableMap<byte[], byte[]> qualifierMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); for (Map.Entry<byte[], NavigableMap<Long, byte[]>> qualifierEntry : familyEntry.getValue() .entrySet()) { byte[] value = qualifierEntry.getValue().get(qualifierEntry.getValue().firstKey()); qualifierMap.put(qualifierEntry.getKey(), value); } returnMap.put(familyEntry.getKey(), qualifierMap); } return returnMap; } /** * Map of qualifiers to values. * <p> * Returns a Map of the form: <code>Map&lt;qualifier,value&gt;</code> * @param family column family to get * @return map of qualifiers to values */ public NavigableMap<byte[], byte[]> getFamilyMap(byte[] family) { if (this.familyMap == null) { getMap(); } if (isEmpty()) { return null; } NavigableMap<byte[], byte[]> returnMap = new TreeMap<>(Bytes.BYTES_COMPARATOR); NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifierMap = familyMap.get(family); if (qualifierMap == null) { return returnMap; } for (Map.Entry<byte[], NavigableMap<Long, byte[]>> entry : qualifierMap.entrySet()) { byte[] value = entry.getValue().get(entry.getValue().firstKey()); returnMap.put(entry.getKey(), value); } return returnMap; } /** * Returns the value of the first column in the Result. 
* @return value of the first column */ public byte[] value() { if (isEmpty()) { return null; } return CellUtil.cloneValue(cells[0]); } /** * Check if the underlying Cell [] is empty or not * @return true if empty */ public boolean isEmpty() { return this.cells == null || this.cells.length == 0; } /** Returns the size of the underlying Cell [] */ public int size() { return this.cells == null ? 0 : this.cells.length; } /** * */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("keyvalues="); if (isEmpty()) { sb.append("NONE"); return sb.toString(); } sb.append("{"); boolean moreThanOne = false; for (Cell kv : this.cells) { if (moreThanOne) { sb.append(", "); } else { moreThanOne = true; } sb.append(kv.toString()); } sb.append("}"); return sb.toString(); } /** * Does a deep comparison of two Results, down to the byte arrays. * @param res1 first result to compare * @param res2 second result to compare * @throws Exception Every difference is throwing an exception */ public static void compareResults(Result res1, Result res2) throws Exception { compareResults(res1, res2, true); } /** * Does a deep comparison of two Results, down to the byte arrays. 
* @param res1 first result to compare * @param res2 second result to compare * @param verbose includes string representation for all cells in the exception if true; otherwise * include rowkey only * @throws Exception Every difference is throwing an exception */ public static void compareResults(Result res1, Result res2, boolean verbose) throws Exception { if (res2 == null) { throw new Exception( "There wasn't enough rows, we stopped at " + Bytes.toStringBinary(res1.getRow())); } if (res1.size() != res2.size()) { if (verbose) { throw new Exception( "This row doesn't have the same number of KVs: " + res1 + " compared to " + res2); } else { throw new Exception( "This row doesn't have the same number of KVs: row=" + Bytes.toStringBinary(res1.getRow()) + ", " + res1.size() + " cells are compared to " + res2.size() + " cells"); } } ExtendedCell[] ourKVs = res1.cells; ExtendedCell[] replicatedKVs = res2.cells; for (int i = 0; i < res1.size(); i++) { if ( !ourKVs[i].equals(replicatedKVs[i]) || !CellUtil.matchingValue(ourKVs[i], replicatedKVs[i]) || !PrivateCellUtil.matchingTags(ourKVs[i], replicatedKVs[i]) ) { if (verbose) { throw new Exception("This result was different: " + res1 + " compared to " + res2); } else { throw new Exception( "This result was different: row=" + Bytes.toStringBinary(res1.getRow())); } } } } /** * Forms a single result from the partial results in the partialResults list. This method is * useful for reconstructing partial results on the client side. 
* @param partialResults list of partial results * @return The complete result that is formed by combining all of the partial results together * @throws IOException A complete result cannot be formed because the results in the partial list * come from different rows */ public static Result createCompleteResult(Iterable<Result> partialResults) throws IOException { if (partialResults == null) { return Result.create(Collections.emptyList(), null, false); } List<Cell> cells = new ArrayList<>(); boolean stale = false; byte[] prevRow = null; byte[] currentRow = null; for (Iterator<Result> iter = partialResults.iterator(); iter.hasNext();) { Result r = iter.next(); currentRow = r.getRow(); if (prevRow != null && !Bytes.equals(prevRow, currentRow)) { throw new IOException("Cannot form complete result. Rows of partial results do not match." + " Partial Results: " + partialResults); } // Ensure that all Results except the last one are marked as partials. The last result // may not be marked as a partial because Results are only marked as partials when // the scan on the server side must be stopped due to reaching the maxResultSize. // Visualizing it makes it easier to understand: // maxResultSize: 2 cells // (-x-) represents cell number x in a row // Example: row1: -1- -2- -3- -4- -5- (5 cells total) // How row1 will be returned by the server as partial Results: // Result1: -1- -2- (2 cells, size limit reached, mark as partial) // Result2: -3- -4- (2 cells, size limit reached, mark as partial) // Result3: -5- (1 cell, size limit NOT reached, NOT marked as partial) if (iter.hasNext() && !r.mayHaveMoreCellsInRow()) { throw new IOException("Cannot form complete result. Result is missing partial flag. " + "Partial Results: " + partialResults); } prevRow = currentRow; stale = stale || r.isStale(); Collections.addAll(cells, r.rawCells()); } return Result.create(cells, null, stale); } /** * Get total size of raw cells * @return Total size. 
*/ public static long getTotalSizeOfCells(Result result) { long size = 0; if (result.isEmpty()) { return size; } for (Cell c : result.rawCells()) { size += c.heapSize(); } return size; } /** * Copy another Result into this one. Needed for the old Mapred framework * @throws UnsupportedOperationException if invoked on instance of EMPTY_RESULT (which is supposed * to be immutable). */ public void copyFrom(Result other) { checkReadonly(); this.row = null; this.familyMap = null; this.cells = other.cells; } /** * For client users: You should only use the return value as a * {@link org.apache.hadoop.hbase.CellScanner}, {@link ExtendedCellScanner} is marked as * IA.Private which means there is no guarantee about its API stability. */ @Override public ExtendedCellScanner cellScanner() { // Reset this.cellScannerIndex = INITIAL_CELLSCANNER_INDEX; return this; } /** * For client users: You should only use the return value as a {@link Cell}, {@link ExtendedCell} * is marked as IA.Private which means there is no guarantee about its API stability. */ @Override public ExtendedCell current() { if ( isEmpty() || cellScannerIndex == INITIAL_CELLSCANNER_INDEX || cellScannerIndex >= cells.length ) { return null; } return this.cells[cellScannerIndex]; } @Override public boolean advance() { if (isEmpty()) { return false; } cellScannerIndex++; if (cellScannerIndex < this.cells.length) { return true; } else if (cellScannerIndex == this.cells.length) { return false; } throw new NoSuchElementException("Cannot advance beyond the last cell"); } public Boolean getExists() { return exists; } public void setExists(Boolean exists) { checkReadonly(); this.exists = exists; } /** * Whether or not the results are coming from possibly stale data. Stale results might be returned * if {@link Consistency} is not STRONG for the query. * @return Whether or not the results are coming from possibly stale data. 
*/ public boolean isStale() { return stale; } /** * For scanning large rows, the RS may choose to return the cells chunk by chunk to prevent OOM or * timeout. This flag is used to tell you if the current Result is the last one of the current * row. False means this Result is the last one. True means there MAY be more cells belonging to * the current row. If you don't use {@link Scan#setAllowPartialResults(boolean)} or * {@link Scan#setBatch(int)}, this method will always return false because the Result must * contains all cells in one Row. */ public boolean mayHaveMoreCellsInRow() { return mayHaveMoreCellsInRow; } /** * Set load information about the region to the information about the result * @param loadStats statistics about the current region from which this was returned */ @InterfaceAudience.Private public void setStatistics(RegionLoadStats loadStats) { this.stats = loadStats; } @InterfaceAudience.Private public void setMetrics(QueryMetrics metrics) { this.metrics = metrics; } /** * Returns the associated statistics about the region from which this was returned. Can be * <tt>null</tt> if stats are disabled. */ public RegionLoadStats getStats() { return stats; } /** Returns the query metrics, or {@code null} if we do not enable metrics. */ public QueryMetrics getMetrics() { return metrics; } /** * All methods modifying state of Result object must call this method to ensure that special * purpose immutable Results can't be accidentally modified. */ private void checkReadonly() { if (readonly == true) { throw new UnsupportedOperationException("Attempting to modify readonly EMPTY_RESULT!"); } } /** * Return true if this Result is a cursor to tell users where the server has scanned. In this * Result the only meaningful method is {@link #getCursor()}. {@code * while (r = scanner.next() && r != null) { * if(r.isCursor()){ * // scanning is not end, it is a cursor, save its row key and close scanner if you want, or * // just continue the loop to call next(). 
} else { // just like before } } // scanning is end } * {@link Scan#setNeedCursorResult(boolean)} {@link Cursor} {@link #getCursor()} */ public boolean isCursor() { return cursor != null; } /** * Return the cursor if this Result is a cursor result. {@link Scan#setNeedCursorResult(boolean)} * {@link Cursor} {@link #isCursor()} */ public Cursor getCursor() { return cursor; } }
openjdk/jmc
32,771
application/org.openjdk.jmc.joverflow/src/main/java/org/openjdk/jmc/joverflow/heap/parser/HprofReader.java
/* * Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved. * * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The contents of this file are subject to the terms of either the Universal Permissive License * v 1.0 as shown at https://oss.oracle.com/licenses/upl * * or the following license: * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * 3. Neither the name of the copyright holder nor the names of its contributors may be used to * endorse or promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.openjdk.jmc.joverflow.heap.parser; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import org.openjdk.jmc.joverflow.heap.model.ArrayTypeCodes; import org.openjdk.jmc.joverflow.heap.model.JavaBoolean; import org.openjdk.jmc.joverflow.heap.model.JavaByte; import org.openjdk.jmc.joverflow.heap.model.JavaChar; import org.openjdk.jmc.joverflow.heap.model.JavaClass; import org.openjdk.jmc.joverflow.heap.model.JavaDouble; import org.openjdk.jmc.joverflow.heap.model.JavaField; import org.openjdk.jmc.joverflow.heap.model.JavaFloat; import org.openjdk.jmc.joverflow.heap.model.JavaInt; import org.openjdk.jmc.joverflow.heap.model.JavaLong; import org.openjdk.jmc.joverflow.heap.model.JavaObjectRef; import org.openjdk.jmc.joverflow.heap.model.JavaShort; import org.openjdk.jmc.joverflow.heap.model.JavaThing; import org.openjdk.jmc.joverflow.heap.model.Root; import org.openjdk.jmc.joverflow.heap.model.Snapshot; import org.openjdk.jmc.joverflow.heap.model.StackFrame; import org.openjdk.jmc.joverflow.heap.model.StackTrace; import org.openjdk.jmc.joverflow.util.FileUtils; import org.openjdk.jmc.joverflow.util.LongToObjectMap; import org.openjdk.jmc.joverflow.util.MiscUtils; import org.openjdk.jmc.joverflow.util.VerboseOutputCollector; /** * Functionality for reading a hprof file. */ class HprofReader extends HeapDumpReader /* imports */ implements ArrayTypeCodes { final static int MAGIC_NUMBER = 0x4a415641; // That's "JAVA", the first part of "JAVA PROFILE ..." private final static String[] VERSIONS = {" PROFILE 1.0\0", " PROFILE 1.0.1\0", " PROFILE 1.0.2\0",}; // The following version numbers are indices into VERSIONS. The instance // data member version is set to one of these, and it drives decisions when // reading the file. 
// // Version 1.0.1 added HPROF_GC_PRIM_ARRAY_DUMP, which requires no // version-sensitive parsing. // // Version 1.0.1 changed the type of a constant pool entry from a signature // to a typecode. // // Version 1.0.2 added HPROF_HEAP_DUMP_SEGMENT and HPROF_HEAP_DUMP_END // to allow a large heap to be dumped as a sequence of heap dump segments. // // The HPROF agent in J2SE 1.2 through to 5.0 generate a version 1.0.1 // file. In Java SE 6.0 the version is either 1.0.1 or 1.0.2 depending on // the size of the heap (normally it will be 1.0.1 but for multi-GB // heaps the heap dump will not fit in a HPROF_HEAP_DUMP record so the // dump is generated as version 1.0.2). @SuppressWarnings("unused") private final static int VERSION_JDK12BETA3 = 0; private final static int VERSION_JDK12BETA4 = 1; private final static int VERSION_JDK6 = 2; // Record types static final int HPROF_UTF8 = 0x01; static final int HPROF_LOAD_CLASS = 0x02; static final int HPROF_UNLOAD_CLASS = 0x03; static final int HPROF_FRAME = 0x04; static final int HPROF_TRACE = 0x05; static final int HPROF_ALLOC_SITES = 0x06; static final int HPROF_HEAP_SUMMARY = 0x07; static final int HPROF_START_THREAD = 0x0a; static final int HPROF_END_THREAD = 0x0b; static final int HPROF_HEAP_DUMP = 0x0c; static final int HPROF_CPU_SAMPLES = 0x0d; static final int HPROF_CONTROL_SETTINGS = 0x0e; static final int HPROF_LOCKSTATS_WAIT_TIME = 0x10; static final int HPROF_LOCKSTATS_HOLD_TIME = 0x11; static final int HPROF_GC_ROOT_UNKNOWN = 0xff; static final int HPROF_GC_ROOT_JNI_GLOBAL = 0x01; static final int HPROF_GC_ROOT_JNI_LOCAL = 0x02; static final int HPROF_GC_ROOT_JAVA_FRAME = 0x03; static final int HPROF_GC_ROOT_NATIVE_STACK = 0x04; static final int HPROF_GC_ROOT_STICKY_CLASS = 0x05; static final int HPROF_GC_ROOT_THREAD_BLOCK = 0x06; static final int HPROF_GC_ROOT_MONITOR_USED = 0x07; static final int HPROF_GC_ROOT_THREAD_OBJ = 0x08; static final int HPROF_GC_CLASS_DUMP = 0x20; static final int HPROF_GC_INSTANCE_DUMP 
= 0x21; static final int HPROF_GC_OBJ_ARRAY_DUMP = 0x22; static final int HPROF_GC_PRIM_ARRAY_DUMP = 0x23; static final int HPROF_HEAP_DUMP_SEGMENT = 0x1c; static final int HPROF_HEAP_DUMP_END = 0x2c; private final static int T_CLASS = 2; private final ReadBuffer.Factory bufFactory; private final File hprofFile; // Non-null if we use a real disk file private final byte[] fileImageBytes; // Non-null if we use a byte[] array with file image private PositionDataInputStream in; private final long fileSize; private int version; // The version of .hprof being read private int dumpsToSkip; private int identifierSize; // Size, in bytes, of identifiers in HPROF file. private final LongToObjectMap<String> names; // HashMap<Integer, ThreadObject>, used to map the thread sequence number // (aka "serial number") to the thread object ID for // HPROF_GC_ROOT_THREAD_OBJ. ThreadObject is a trivial inner class, // at the end of this file. private HashMap<Integer, ThreadObject> threadObjects; /** Maps class object ID to class name (in dotted format) */ private LongToObjectMap<String> classNameFromObjectID; // Maps stack frame ID to StackFrame. // Null if we are not tracking call stacks private HashMap<Long, StackFrame> stackFrames; // HashMap<Integer, StackTrace> maps stack frame ID to StackTrace // Null if we are not tracking call stacks private HashMap<Integer, StackTrace> stackTraces; // Maps class serial # to class object ID // Null if we are not tracking call stacks private HashMap<Integer, String> classNameFromSerialNo; private Snapshot.Builder snpBuilder; // Maximum size of a mapped byte buffer used for lazy reads of object contents. // If the heap dump file is longer than this, we need to use more than one BB for it. 
private static final int MAX_BB_SIZE = Integer.MAX_VALUE; // True if heap dump is longer than 2GB - in that case we'll have to create multiple // mapped byte buffers to perform random reads efficiently private final boolean longFile; private final ArrayList<Long> mappedBBEndOfs; private long currentBBMaxOfs; private long prevObjStartOfs; // If > 0, use this instead of the value that we half-read/half-guess from the snapshot private final int explicitPointerSize; // Diagnostics and progress tracking private final VerboseOutputCollector vc; private volatile boolean cancelled; HprofReader(ReadBuffer.Factory bufFactory, boolean callStack, int explicitPointerSize, VerboseOutputCollector vc) throws DumpCorruptedException { this.bufFactory = bufFactory; String fileName = bufFactory.getFileName(); int dumpNumber = 1; if (fileName != null) { int pos = fileName.lastIndexOf('#'); if (pos > -1) { String num = fileName.substring(pos + 1, fileName.length()); try { dumpNumber = Integer.parseInt(num, 10); } catch (NumberFormatException ex) { String msg = "in file name \"" + fileName + "\", a dump number was " + "expected after the :, but \"" + num + "\" was found instead."; throw new DumpCorruptedException(msg); } fileName = fileName.substring(0, pos); } } fileImageBytes = bufFactory.getFileImageBytes(); if (fileImageBytes == null) { // .hprof file will be read from disk try { hprofFile = FileUtils.checkFileExistsAndReadable(fileName, false); this.fileSize = hprofFile.length(); if (fileSize == 0) { throw new DumpCorruptedException("file size is 0"); } } catch (IOException ex) { throw new DumpCorruptedException(ex.getMessage()); } } else { // We have the .hprof file bytes in Java heap - typically in tests hprofFile = null; this.fileSize = fileImageBytes.length; } this.vc = vc; this.dumpsToSkip = dumpNumber - 1; this.explicitPointerSize = explicitPointerSize; names = new LongToObjectMap<>((int) (fileSize / 100000), false); threadObjects = new HashMap<>(43); classNameFromObjectID = 
new LongToObjectMap<>(1000, false); if (callStack) { stackFrames = new HashMap<>(43); stackTraces = new HashMap<>(43); classNameFromSerialNo = new HashMap<>(); } longFile = fileSize > MAX_BB_SIZE; if (longFile) { mappedBBEndOfs = new ArrayList<>(); currentBBMaxOfs = MAX_BB_SIZE - 1; } else { mappedBBEndOfs = null; } } @Override public Snapshot read() throws DumpCorruptedException, HprofParsingCancelledException { String dumpCorruptedExMsg = ""; ReadBuffer readBuf = null; try { if (hprofFile != null) { in = new PositionDataInputStream(new BufferedInputStream(new FileInputStream(hprofFile))); } else { in = new PositionDataInputStream(new ByteArrayInputStream(fileImageBytes)); } doRead(); // Some very simple/obvious sanity checks if (snpBuilder.getNumAllObjects() == 0) { throw new DumpCorruptedException("did not read any objects"); } if (snpBuilder.getNumClasses() == 0) { throw new DumpCorruptedException("did not read any classes"); } snpBuilder.onFinishReadObjects(); long[] mappedBBEndOfsArray = null; if (mappedBBEndOfs != null) { mappedBBEndOfsArray = new long[mappedBBEndOfs.size() + 1]; for (int i = 0; i < mappedBBEndOfs.size(); i++) { mappedBBEndOfsArray[i] = mappedBBEndOfs.get(i); } mappedBBEndOfsArray[mappedBBEndOfsArray.length - 1] = fileSize - 1; } readBuf = bufFactory.create(mappedBBEndOfsArray); } catch (IOException ex) { dumpCorruptedExMsg = "caught exception " + ex + ". 
Details:\n"; StringWriter exWriterBuf = new StringWriter(200); ex.printStackTrace(new PrintWriter(exWriterBuf)); dumpCorruptedExMsg += exWriterBuf.toString(); } finally { if (in != null) { try { in.close(); } catch (IOException ex) { dumpCorruptedExMsg += "\nAlso, could not close the stream properly: caught exception " + ex; if (readBuf != null) { readBuf.close(); } } } } if (dumpCorruptedExMsg.length() > 0) { throw new DumpCorruptedException(dumpCorruptedExMsg); } try { return snpBuilder.buildSnapshot(readBuf); } catch (RuntimeException ex) { if (readBuf != null) { readBuf.close(); } throw ex; } catch (Error er) { // Most likely an OOM if (readBuf != null) { readBuf.close(); } throw er; } } @Override public synchronized int getProgressPercentage() { if (in != null) { return (int) (in.position() * 100 / fileSize); } else { return 0; } } @Override public void cancelReading() { cancelled = true; } private void doRead() throws DumpCorruptedException, IOException, HprofParsingCancelledException { int magicNumber = in.readInt(); if (magicNumber != MAGIC_NUMBER) { throw new DumpCorruptedException("unrecognized magic number: " + magicNumber); } version = readVersionHeader(); identifierSize = in.readInt(); if (identifierSize != 4 && identifierSize != 8) { throw new DumpCorruptedException("unsupported format: " + "specifies pointer size of " + identifierSize + ". JOverflow supports only size 4 and 8."); } snpBuilder = new Snapshot.Builder(fileSize, identifierSize, explicitPointerSize, vc); skipBytes(8); // long creationDate = in.readLong(); // System.out.println("Dump file created " + (new Date(creationDate))); while (true) { int type; try { type = in.readUnsignedByte(); } catch (EOFException ignored) { break; } in.readInt(); // Timestamp of this record // Length of record: readInt() will return negative value for record length >2GB. So store 32bit value in long to keep it unsigned. 
long length = in.readInt() & 0xffffffffL; // System.out.println("Read record type " + type + ", length " + length + " at position " + toHex(currPos)); if (length < 0) { throw new DumpCorruptedException( "bad record length of " + length + " at byte " + (in.position() - 4) + " of file."); } switch (type) { case HPROF_UTF8: { long id = readID(); byte[] chars = new byte[(int) length - identifierSize]; in.readFully(chars); names.put(id, new String(chars)); break; } case HPROF_LOAD_CLASS: { int serialNo = in.readInt(); // Not used long classID = readID(); in.readInt(); // int stackTraceSerialNo, unused long classNameID = readID(); String nm = getNameFromID(classNameID).replace('/', '.'); classNameFromObjectID.put(classID, nm); if (classNameFromSerialNo != null) { classNameFromSerialNo.put(serialNo, nm); } break; } case HPROF_HEAP_DUMP: { if (dumpsToSkip <= 0) { try { vc.debug("Sub-dump of length " + length + " starts at position " + in.position()); readHeapDump(length); } catch (EOFException exp) { handleEOF(exp); } // System.out.println("Finished processing instances in heap dump."); return; } else { dumpsToSkip--; skipBytes(length); } break; } case HPROF_HEAP_DUMP_END: { if (version >= VERSION_JDK6) { if (dumpsToSkip <= 0) { skipBytes(length); // should be no-op return; } else { // skip this dump (of the end record for a sequence of dump segments) dumpsToSkip--; } } else { // HPROF_HEAP_DUMP_END only recognized in >= 1.0.2 vc.addWarning("Ignoring unrecognized record type", Integer.toString(type)); } skipBytes(length); // should be no-op break; } case HPROF_HEAP_DUMP_SEGMENT: { if (version >= VERSION_JDK6) { if (dumpsToSkip <= 0) { try { vc.debug("Segment of length " + length + " starts at position " + in.position()); // read the dump segment readHeapDump(length); } catch (EOFException exp) { handleEOF(exp); } } else { // all segments comprising the heap dump will be skipped skipBytes(length); } } else { // HPROF_HEAP_DUMP_SEGMENT only recognized in >= 1.0.2 
vc.addWarning("Ignoring unrecognized record type", Integer.toString(type)); skipBytes(length); } break; } case HPROF_FRAME: { if (stackFrames == null) { skipBytes(length); } else { long id = readID(); String methodName = getNameFromID(readID()); String methodSig = getNameFromID(readID()); String sourceFile = getNameFromID(readID()); int classSer = in.readInt(); String className = classNameFromSerialNo.get(Integer.valueOf(classSer)); int lineNumber = in.readInt(); if (lineNumber < StackFrame.LINE_NUMBER_NATIVE) { vc.addWarning("Weird stack frame line number", Integer.toString(lineNumber)); lineNumber = StackFrame.LINE_NUMBER_UNKNOWN; } stackFrames.put(id, new StackFrame(methodName, methodSig, className, sourceFile, lineNumber)); } break; } case HPROF_TRACE: { if (stackTraces == null) { skipBytes(length); } else { int serialNo = in.readInt(); in.readInt(); // int threadSeq, not used StackFrame[] frames = new StackFrame[in.readInt()]; for (int i = 0; i < frames.length; i++) { long fid = readID(); frames[i] = stackFrames.get(Long.valueOf(fid)); if (frames[i] == null) { throw new DumpCorruptedException("stack frame " + toHex(fid) + " not found"); } } stackTraces.put(serialNo, new StackTrace(frames)); } break; } case HPROF_HEAP_SUMMARY: case HPROF_UNLOAD_CLASS: case HPROF_ALLOC_SITES: case HPROF_START_THREAD: case HPROF_END_THREAD: case HPROF_CPU_SAMPLES: case HPROF_CONTROL_SETTINGS: case HPROF_LOCKSTATS_WAIT_TIME: case HPROF_LOCKSTATS_HOLD_TIME: { // Ignore these record types skipBytes(length); break; } default: { skipBytes(length); vc.addWarning("Ignoring unrecognized record type", Integer.toString(type)); } } } } private void skipBytes(long length) throws IOException, DumpCorruptedException { long remainingBytes = length; do { int skippedBytes = in.skipBytes((int) length); remainingBytes -= skippedBytes; if (remainingBytes > 0) { if (in.position() >= fileSize) { throw new DumpCorruptedException("Reached end of file while trying to skip " + length + " bytes"); } } } 
while (remainingBytes > 0); } private int readVersionHeader() throws IOException, DumpCorruptedException { int candidatesLeft = VERSIONS.length; boolean[] matched = new boolean[VERSIONS.length]; for (int i = 0; i < candidatesLeft; i++) { matched[i] = true; } int pos = 0; while (candidatesLeft > 0) { char c = (char) in.readByte(); for (int i = 0; i < VERSIONS.length; i++) { if (matched[i]) { if (c != VERSIONS[i].charAt(pos)) { // Not matched matched[i] = false; --candidatesLeft; } else if (pos == VERSIONS[i].length() - 1) { // Full match vc.debug("Hprof file version: " + VERSIONS[i]); return i; } } } ++pos; } throw new DumpCorruptedException("version string not recognized at byte " + (pos + 3)); } private void readHeapDump(long dumpLength) throws DumpCorruptedException, IOException, HprofParsingCancelledException { long startPos = in.position(); long endPos = startPos + dumpLength; // "Chunks" below are used to check for cancellation periodically int curChunk = (int) (in.position() >> 19); // Check every 512K long id, pos; while ((pos = in.position()) < endPos) { int recordType = in.readUnsignedByte(); int newCurChunk = (int) (pos >> 19); if (newCurChunk > curChunk) { curChunk = newCurChunk; checkForCancellation(); } switch (recordType) { case HPROF_GC_INSTANCE_DUMP: { readInstance(); break; } case HPROF_GC_OBJ_ARRAY_DUMP: { readArray(false); break; } case HPROF_GC_PRIM_ARRAY_DUMP: { readArray(true); break; } case HPROF_GC_ROOT_UNKNOWN: { id = readID(); snpBuilder.addRoot(new Root(id, 0, Root.UNKNOWN, "")); break; } case HPROF_GC_ROOT_THREAD_OBJ: { id = readID(); int threadSeq = in.readInt(); int stackSeq = in.readInt(); threadObjects.put(threadSeq, new ThreadObject(id, stackSeq)); break; } case HPROF_GC_ROOT_JNI_GLOBAL: { id = readID(); readID(); // long globalRefId, ignored for now snpBuilder.addRoot(new Root(id, 0, Root.JNI_GLOBAL, "")); break; } case HPROF_GC_ROOT_JNI_LOCAL: { id = readID(); int threadSeq = in.readInt(); int depth = in.readInt(); ThreadObject to 
= getThreadObjectFromSequence(threadSeq); StackTrace st = getStackTraceFromSerial(to.stackSeq); if (st != null) { st = st.traceForDepth(depth + 1); } snpBuilder.addRoot(new Root(id, to.threadId, Root.JNI_LOCAL, "", st)); break; } case HPROF_GC_ROOT_JAVA_FRAME: { id = readID(); int threadSeq = in.readInt(); int depth = in.readInt(); ThreadObject to = getThreadObjectFromSequence(threadSeq); StackTrace st = getStackTraceFromSerial(to.stackSeq); if (st != null) { st = st.traceForDepth(depth + 1); } snpBuilder.addRoot(new Root(id, to.threadId, Root.JAVA_LOCAL, "", st)); break; } case HPROF_GC_ROOT_NATIVE_STACK: { id = readID(); int threadSeq = in.readInt(); ThreadObject to = getThreadObjectFromSequence(threadSeq); StackTrace st = getStackTraceFromSerial(to.stackSeq); snpBuilder.addRoot(new Root(id, to.threadId, Root.NATIVE_STACK, "", st)); break; } case HPROF_GC_ROOT_STICKY_CLASS: { id = readID(); snpBuilder.addRoot(new Root(id, 0, Root.SYSTEM_CLASS, "")); break; } case HPROF_GC_ROOT_THREAD_BLOCK: { id = readID(); int threadSeq = in.readInt(); ThreadObject to = getThreadObjectFromSequence(threadSeq); StackTrace st = getStackTraceFromSerial(to.stackSeq); snpBuilder.addRoot(new Root(id, to.threadId, Root.THREAD_BLOCK, "", st)); break; } case HPROF_GC_ROOT_MONITOR_USED: { id = readID(); snpBuilder.addRoot(new Root(id, 0, Root.BUSY_MONITOR, "")); break; } case HPROF_GC_CLASS_DUMP: { readClass(); break; } default: { throw new DumpCorruptedException("unrecognized heap dump sub-record type: " + recordType + ". Technical info: position = " + pos + ", bytes left = " + (endPos - pos)); } } } if (pos != endPos) { vc.addWarning("Error reading heap dump or heap dump segment", "Byte count is " + pos + " instead of " + endPos + ". Difference is " + (endPos - pos)); skipBytes(endPos - pos); } } private long readID() throws IOException { return (identifierSize == 4) ? (Snapshot.SMALL_ID_MASK & in.readInt()) : in.readLong(); } /** * Read a java value. 
If result is non-null, it's expected to be an array of one element. We use * it to fake multiple return values. Returns the number of bytes read. */ private int readValue(JavaThing[] resultArr) throws DumpCorruptedException, IOException { byte type = in.readByte(); return 1 + readValueForType(type, resultArr); } private int readValueForType(byte type, JavaThing[] resultArr) throws DumpCorruptedException, IOException { if (version >= VERSION_JDK12BETA4) { type = signatureFromTypeId(type); } return readValueForTypeSignature(type, resultArr); } private int readValueForTypeSignature(byte type, JavaThing[] resultArr) throws DumpCorruptedException, IOException { switch (type) { case '[': case 'L': { long id = readID(); if (resultArr != null) { resultArr[0] = new JavaObjectRef(id); } return identifierSize; } case 'Z': { int b = in.readByte(); if (b != 0 && b != 1) { vc.addWarning("Illegal boolean value read", Integer.toString(b)); } if (resultArr != null) { resultArr[0] = new JavaBoolean(b != 0); } return 1; } case 'B': { byte b = in.readByte(); if (resultArr != null) { resultArr[0] = new JavaByte(b); } return 1; } case 'S': { short s = in.readShort(); if (resultArr != null) { resultArr[0] = new JavaShort(s); } return 2; } case 'C': { char ch = in.readChar(); if (resultArr != null) { resultArr[0] = new JavaChar(ch); } return 2; } case 'I': { int val = in.readInt(); if (resultArr != null) { resultArr[0] = new JavaInt(val); } return 4; } case 'J': { long val = in.readLong(); if (resultArr != null) { resultArr[0] = new JavaLong(val); } return 8; } case 'F': { float val = in.readFloat(); if (resultArr != null) { resultArr[0] = new JavaFloat(val); } return 4; } case 'D': { double val = in.readDouble(); if (resultArr != null) { resultArr[0] = new JavaDouble(val); } return 8; } default: { throw new DumpCorruptedException("Bad value signature: " + type); } } } private ThreadObject getThreadObjectFromSequence(int threadSeq) throws DumpCorruptedException, IOException { ThreadObject 
to = threadObjects.get(Integer.valueOf(threadSeq)); if (to == null) { throw new DumpCorruptedException("thread " + threadSeq + " not found for JNI local ref"); } return to; } private String getNameFromID(long id) throws IOException { if (id == 0L) { return ""; } String result = names.get(id); if (result == null) { vc.addWarning("name not found", "at " + toHex(id)); return "unresolved name " + toHex(id); } return result; } private StackTrace getStackTraceFromSerial(int ser) throws IOException { if (stackTraces == null) { return null; } StackTrace result = stackTraces.get(Integer.valueOf(ser)); if (result == null) { vc.addWarning("Stack trace not found", "for serial # " + ser); } return result; } /** Handles a HPROF_GC_CLASS_DUMP. Returns the number of bytes read. */ private int readClass() throws DumpCorruptedException, IOException { long id = readID(); skipBytes(4); // StackTrace stackTrace = getStackTraceFromSerial(in.readInt()); long superId = readID(); long classLoaderId = readID(); long signersId = readID(); long protDomainId = readID(); readID(); // long reserved1, unused readID(); // long reserved2, unused int fieldsSize = in.readInt(); int bytesRead = 7 * identifierSize + 8; int numConstPoolEntries = in.readUnsignedShort(); bytesRead += 2; for (int i = 0; i < numConstPoolEntries; i++) { in.readUnsignedShort(); // int index, unused bytesRead += 2; bytesRead += readValue(null); // We ignore the values } int numStatics = in.readUnsignedShort(); bytesRead += 2; // We may need additional quasi-fields for signers and protection domain int numQuasiFields = (signersId != 0 || protDomainId != 0) ? 2 : 0; int nAllStatics = numStatics + numQuasiFields; JavaField[] staticFields = nAllStatics > 0 ? new JavaField[nAllStatics] : JavaClass.NO_FIELDS; JavaThing[] staticValues = nAllStatics > 0 ? 
new JavaThing[nAllStatics] : JavaClass.NO_VALUES; if (numStatics > 0) { JavaThing[] valueBin = new JavaThing[1]; for (int i = 0; i < numStatics; i++) { long nameId = readID(); bytesRead += identifierSize; byte type = in.readByte(); bytesRead++; bytesRead += readValueForType(type, valueBin); String fieldName = getNameFromID(nameId); if (version >= VERSION_JDK12BETA4) { type = signatureFromTypeId(type); } staticFields[i] = JavaField.newInstance(fieldName, (char) type, snpBuilder.getPointerSize()); staticValues[i] = valueBin[0]; } } if (numQuasiFields > 0) { JavaField.addStaticQuaziFields(staticFields); } int numFields = in.readUnsignedShort(); bytesRead += 2; JavaField[] fields = numFields > 0 ? new JavaField[numFields] : JavaClass.NO_FIELDS; for (int i = 0; i < numFields; i++) { long nameId = readID(); bytesRead += identifierSize; byte type = in.readByte(); bytesRead++; String fieldName = getNameFromID(nameId); if (version >= VERSION_JDK12BETA4) { type = signatureFromTypeId(type); } fields[i] = JavaField.newInstance(fieldName, (char) type, snpBuilder.getPointerSize()); } String name = classNameFromObjectID.get(id); if (name == null) { vc.addWarning("Class name not found", "for " + toHex(id)); name = "unknown-name@" + toHex(id); } JavaClass c = new JavaClass(id, name, superId, classLoaderId, signersId, protDomainId, fields, staticFields, staticValues, fieldsSize, snpBuilder.getInMemoryInstanceSize(fieldsSize)); snpBuilder.addClass(c); return bytesRead; } private String toHex(long addr) { return MiscUtils.toHex(addr); } /** * Handles a HPROF_GC_INSTANCE_DUMP Return number of bytes read */ private int readInstance() throws DumpCorruptedException, IOException { long objOfsInFile = in.position(); long id = readID(); skipBytes(4); // StackTrace stackTrace = getStackTraceFromSerial(in.readInt()); long classID = readID(); int objDataSize = in.readInt(); int bytesRead = (2 * identifierSize) + 8 + objDataSize; skipBytes(objDataSize); snpBuilder.addJavaObject(id, classID, 
objOfsInFile, objDataSize); if (longFile) { handlePossibleBBBorder(objOfsInFile); } return bytesRead; } /** * Handles a HPROF_GC_OBJ_ARRAY_DUMP or HPROF_GC_PRIM_ARRAY_DUMP. Returns number of bytes read. */ private int readArray(boolean isPrimitive) throws DumpCorruptedException, IOException { long objOfsInFile = in.position(); long id = readID(); skipBytes(4); // StackTrace stackTrace = getStackTraceFromSerial(in.readInt()); int num = in.readInt(); int bytesRead = identifierSize + 8; long arrayClassID; if (isPrimitive) { arrayClassID = in.readByte(); bytesRead++; } else { arrayClassID = readID(); bytesRead += identifierSize; } // Check for primitive arrays: char primitiveSignature = 0x00; int elSize = 0; if (isPrimitive || version < VERSION_JDK12BETA4) { switch ((int) arrayClassID) { case T_BOOLEAN: { primitiveSignature = 'Z'; elSize = 1; break; } case T_CHAR: { primitiveSignature = 'C'; elSize = 2; break; } case T_FLOAT: { primitiveSignature = 'F'; elSize = 4; break; } case T_DOUBLE: { primitiveSignature = 'D'; elSize = 8; break; } case T_BYTE: { primitiveSignature = 'B'; elSize = 1; break; } case T_SHORT: { primitiveSignature = 'S'; elSize = 2; break; } case T_INT: { primitiveSignature = 'I'; elSize = 4; break; } case T_LONG: { primitiveSignature = 'J'; elSize = 8; break; } } if (version >= VERSION_JDK12BETA4 && primitiveSignature == 0x00) { throw new DumpCorruptedException("unrecognized typecode: " + arrayClassID); } } int dataSize = isPrimitive ? elSize * num : identifierSize * num; if (in.position() + dataSize > fileSize) { throw new DumpCorruptedException((isPrimitive ? 
"Primitive" : "Object") + " array at position " + in.position() + " is " + dataSize + " bytes long, that does not fit into the dump file"); } bytesRead += dataSize; skipBytes(dataSize); if (isPrimitive) { snpBuilder.addJavaValueArray(id, primitiveSignature, objOfsInFile, num, dataSize); } else { snpBuilder.addJavaObjectArray(id, arrayClassID, objOfsInFile, num, dataSize); } if (longFile) { handlePossibleBBBorder(objOfsInFile); } return bytesRead; } private byte signatureFromTypeId(byte typeId) throws DumpCorruptedException, IOException { switch (typeId) { case T_CLASS: return (byte) 'L'; case T_BOOLEAN: return (byte) 'Z'; case T_CHAR: return (byte) 'C'; case T_FLOAT: return (byte) 'F'; case T_DOUBLE: return (byte) 'D'; case T_BYTE: return (byte) 'B'; case T_SHORT: return (byte) 'S'; case T_INT: return (byte) 'I'; case T_LONG: return (byte) 'J'; default: throw new DumpCorruptedException("invalid type id of " + typeId); } } private void handlePossibleBBBorder(long thisObjStartOfs) { if (thisObjStartOfs >= currentBBMaxOfs) { if (prevObjStartOfs > 0) { // Normal case mappedBBEndOfs.add(prevObjStartOfs - 1); } else { // Seems to happen only in tests, when maxBBSize is small mappedBBEndOfs.add(Long.valueOf(MAX_BB_SIZE)); } currentBBMaxOfs = mappedBBEndOfs.get(mappedBBEndOfs.size() - 1) + MAX_BB_SIZE; } prevObjStartOfs = thisObjStartOfs; } private void handleEOF(EOFException exp) { vc.addWarning("Unexpected EOF", "Will miss information"); // we have EOF, we have to tolerate missing references snpBuilder.setUnresolvedObjectsOk(true); } private void checkForCancellation() throws HprofParsingCancelledException { if (cancelled) { throw new HprofParsingCancelledException(); } } /** * A trivial data-holder class for HPROF_GC_ROOT_THREAD_OBJ. */ private static class ThreadObject { long threadId; int stackSeq; ThreadObject(long threadId, int stackSeq) { this.threadId = threadId; this.stackSeq = stackSeq; } } }
googleapis/google-api-java-client-services
35,544
clients/google-api-services-aiplatform/v1/2.0.0/com/google/api/services/aiplatform/v1/model/GoogleCloudAiplatformV1BatchPredictionJob.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.aiplatform.v1.model; /** * A job that uses a Model to produce predictions on multiple input instances. If predictions for * significant portion of the instances fail, the job may finish without attempting predictions for * all remaining instances. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Vertex AI API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class GoogleCloudAiplatformV1BatchPredictionJob extends com.google.api.client.json.GenericJson { /** * Output only. Statistics on completed and failed prediction instances. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1CompletionStats completionStats; /** * Output only. Time when the BatchPredictionJob was created. * The value may be {@code null}. */ @com.google.api.client.util.Key private String createTime; /** * The config of resources used by the Model during the batch prediction. 
If the Model supports * DEDICATED_RESOURCES this config may be provided (and the job will use these resources), if the * Model doesn't support AUTOMATIC_RESOURCES, this config must be provided. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1BatchDedicatedResources dedicatedResources; /** * For custom-trained Models and AutoML Tabular Models, the container of the DeployedModel * instances will send `stderr` and `stdout` streams to Cloud Logging by default. Please note that * the logs incur cost, which are subject to [Cloud Logging * pricing](https://cloud.google.com/logging/pricing). User can disable container logging by * setting this flag to true. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean disableContainerLogging; /** * Required. The user-defined name of this BatchPredictionJob. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String displayName; /** * Customer-managed encryption key options for a BatchPredictionJob. If this is set, then all * resources created by the BatchPredictionJob will be encrypted with the provided encryption key. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1EncryptionSpec encryptionSpec; /** * Output only. Time when the BatchPredictionJob entered any of the following states: * `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. * The value may be {@code null}. */ @com.google.api.client.util.Key private String endTime; /** * Output only. Only populated when the job's state is JOB_STATE_FAILED or JOB_STATE_CANCELLED. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleRpcStatus error; /** * Explanation configuration for this BatchPredictionJob. Can be specified only if * generate_explanation is set to `true`. This value overrides the value of * Model.explanation_spec. 
All fields of explanation_spec are optional in the request. If a field * of the explanation_spec object is not populated, the corresponding field of the * Model.explanation_spec object is inherited. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1ExplanationSpec explanationSpec; /** * Generate explanation with the batch prediction results. When set to `true`, the batch * prediction output changes based on the `predictions_format` field of the * BatchPredictionJob.output_config object: * `bigquery`: output includes a column named * `explanation`. The value is a struct that conforms to the Explanation object. * `jsonl`: The * JSON objects on each line include an additional entry keyed `explanation`. The value of the * entry is a JSON object that conforms to the Explanation object. * `csv`: Generating * explanations for CSV format is not supported. If this field is set to true, either the * Model.explanation_spec or explanation_spec must be populated. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean generateExplanation; /** * Required. Input configuration of the instances on which predictions are performed. The schema * of any single instance may be specified via the Model's PredictSchemata's instance_schema_uri. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1BatchPredictionJobInputConfig inputConfig; /** * Configuration for how to convert batch prediction input instances to the prediction instances * that are sent to the Model. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig instanceConfig; /** * The labels with user-defined metadata to organize BatchPredictionJobs. Label keys and values * can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, * numeric characters, underscores and dashes. 
International characters are allowed. See * https://goo.gl/xmQnxf for more information and examples of labels. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * Immutable. Parameters configuring the batch behavior. Currently only applicable when * dedicated_resources are used (in other cases Vertex AI does the tuning itself). * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1ManualBatchTuningParameters manualBatchTuningParameters; /** * The name of the Model resource that produces the predictions via this job, must share the same * ancestor Location. Starting this job has no impact on any existing deployments of the Model and * their resources. Exactly one of model and unmanaged_container_model must be set. The model * resource name may contain version id or version alias to specify the version. Example: * `projects/{project}/locations/{location}/models/{model}@2` or * `projects/{project}/locations/{location}/models/{model}@golden` if no version is specified, the * default version will be deployed. The model resource could also be a publisher model. Example: * `publishers/{publisher}/models/{model}` or * `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String model; /** * The parameters that govern the predictions. The schema of the parameters may be specified via * the Model's PredictSchemata's parameters_schema_uri. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Object modelParameters; /** * Output only. The version ID of the Model that produces the predictions via this job. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String modelVersionId; /** * Output only. Resource name of the BatchPredictionJob. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String name; /** * Required. The Configuration specifying where output predictions should be written. The schema * of any single prediction may be specified as a concatenation of Model's PredictSchemata's * instance_schema_uri and prediction_schema_uri. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1BatchPredictionJobOutputConfig outputConfig; /** * Output only. Information further describing the output of this job. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1BatchPredictionJobOutputInfo outputInfo; /** * Output only. Partial failures encountered. For example, single files that can't be read. This * field never exceeds 20 entries. Status details fields contain standard Google Cloud error * details. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<GoogleRpcStatus> partialFailures; /** * Output only. Information about resources that had been consumed by this job. Provided in real * time at best effort basis, as well as a final value once the job completes. Note: This field * currently may be not populated for batch predictions that use AutoML Models. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1ResourcesConsumed resourcesConsumed; /** * Output only. Reserved for future use. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean satisfiesPzi; /** * Output only. Reserved for future use. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean satisfiesPzs; /** * The service account that the DeployedModel's container runs as. If not specified, a system * generated one will be used, which has minimal permissions and the custom container, if used, * may not have enough permission to access other Google Cloud resources. 
Users deploying the * Model must have the `iam.serviceAccounts.actAs` permission on this service account. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String serviceAccount; /** * Output only. Time when the BatchPredictionJob for the first time entered the * `JOB_STATE_RUNNING` state. * The value may be {@code null}. */ @com.google.api.client.util.Key private String startTime; /** * Output only. The detailed state of the job. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String state; /** * Contains model information necessary to perform batch prediction without requiring uploading to * model registry. Exactly one of model and unmanaged_container_model must be set. * The value may be {@code null}. */ @com.google.api.client.util.Key private GoogleCloudAiplatformV1UnmanagedContainerModel unmanagedContainerModel; /** * Output only. Time when the BatchPredictionJob was most recently updated. * The value may be {@code null}. */ @com.google.api.client.util.Key private String updateTime; /** * Output only. Statistics on completed and failed prediction instances. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1CompletionStats getCompletionStats() { return completionStats; } /** * Output only. Statistics on completed and failed prediction instances. * @param completionStats completionStats or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setCompletionStats(GoogleCloudAiplatformV1CompletionStats completionStats) { this.completionStats = completionStats; return this; } /** * Output only. Time when the BatchPredictionJob was created. * @return value or {@code null} for none */ public String getCreateTime() { return createTime; } /** * Output only. Time when the BatchPredictionJob was created. 
* @param createTime createTime or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setCreateTime(String createTime) { this.createTime = createTime; return this; } /** * The config of resources used by the Model during the batch prediction. If the Model supports * DEDICATED_RESOURCES this config may be provided (and the job will use these resources), if the * Model doesn't support AUTOMATIC_RESOURCES, this config must be provided. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1BatchDedicatedResources getDedicatedResources() { return dedicatedResources; } /** * The config of resources used by the Model during the batch prediction. If the Model supports * DEDICATED_RESOURCES this config may be provided (and the job will use these resources), if the * Model doesn't support AUTOMATIC_RESOURCES, this config must be provided. * @param dedicatedResources dedicatedResources or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setDedicatedResources(GoogleCloudAiplatformV1BatchDedicatedResources dedicatedResources) { this.dedicatedResources = dedicatedResources; return this; } /** * For custom-trained Models and AutoML Tabular Models, the container of the DeployedModel * instances will send `stderr` and `stdout` streams to Cloud Logging by default. Please note that * the logs incur cost, which are subject to [Cloud Logging * pricing](https://cloud.google.com/logging/pricing). User can disable container logging by * setting this flag to true. * @return value or {@code null} for none */ public java.lang.Boolean getDisableContainerLogging() { return disableContainerLogging; } /** * For custom-trained Models and AutoML Tabular Models, the container of the DeployedModel * instances will send `stderr` and `stdout` streams to Cloud Logging by default. Please note that * the logs incur cost, which are subject to [Cloud Logging * pricing](https://cloud.google.com/logging/pricing). 
User can disable container logging by * setting this flag to true. * @param disableContainerLogging disableContainerLogging or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setDisableContainerLogging(java.lang.Boolean disableContainerLogging) { this.disableContainerLogging = disableContainerLogging; return this; } /** * Required. The user-defined name of this BatchPredictionJob. * @return value or {@code null} for none */ public java.lang.String getDisplayName() { return displayName; } /** * Required. The user-defined name of this BatchPredictionJob. * @param displayName displayName or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setDisplayName(java.lang.String displayName) { this.displayName = displayName; return this; } /** * Customer-managed encryption key options for a BatchPredictionJob. If this is set, then all * resources created by the BatchPredictionJob will be encrypted with the provided encryption key. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1EncryptionSpec getEncryptionSpec() { return encryptionSpec; } /** * Customer-managed encryption key options for a BatchPredictionJob. If this is set, then all * resources created by the BatchPredictionJob will be encrypted with the provided encryption key. * @param encryptionSpec encryptionSpec or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setEncryptionSpec(GoogleCloudAiplatformV1EncryptionSpec encryptionSpec) { this.encryptionSpec = encryptionSpec; return this; } /** * Output only. Time when the BatchPredictionJob entered any of the following states: * `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. * @return value or {@code null} for none */ public String getEndTime() { return endTime; } /** * Output only. Time when the BatchPredictionJob entered any of the following states: * `JOB_STATE_SUCCEEDED`, `JOB_STATE_FAILED`, `JOB_STATE_CANCELLED`. 
* @param endTime endTime or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setEndTime(String endTime) { this.endTime = endTime; return this; } /** * Output only. Only populated when the job's state is JOB_STATE_FAILED or JOB_STATE_CANCELLED. * @return value or {@code null} for none */ public GoogleRpcStatus getError() { return error; } /** * Output only. Only populated when the job's state is JOB_STATE_FAILED or JOB_STATE_CANCELLED. * @param error error or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setError(GoogleRpcStatus error) { this.error = error; return this; } /** * Explanation configuration for this BatchPredictionJob. Can be specified only if * generate_explanation is set to `true`. This value overrides the value of * Model.explanation_spec. All fields of explanation_spec are optional in the request. If a field * of the explanation_spec object is not populated, the corresponding field of the * Model.explanation_spec object is inherited. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1ExplanationSpec getExplanationSpec() { return explanationSpec; } /** * Explanation configuration for this BatchPredictionJob. Can be specified only if * generate_explanation is set to `true`. This value overrides the value of * Model.explanation_spec. All fields of explanation_spec are optional in the request. If a field * of the explanation_spec object is not populated, the corresponding field of the * Model.explanation_spec object is inherited. * @param explanationSpec explanationSpec or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setExplanationSpec(GoogleCloudAiplatformV1ExplanationSpec explanationSpec) { this.explanationSpec = explanationSpec; return this; } /** * Generate explanation with the batch prediction results. 
When set to `true`, the batch * prediction output changes based on the `predictions_format` field of the * BatchPredictionJob.output_config object: * `bigquery`: output includes a column named * `explanation`. The value is a struct that conforms to the Explanation object. * `jsonl`: The * JSON objects on each line include an additional entry keyed `explanation`. The value of the * entry is a JSON object that conforms to the Explanation object. * `csv`: Generating * explanations for CSV format is not supported. If this field is set to true, either the * Model.explanation_spec or explanation_spec must be populated. * @return value or {@code null} for none */ public java.lang.Boolean getGenerateExplanation() { return generateExplanation; } /** * Generate explanation with the batch prediction results. When set to `true`, the batch * prediction output changes based on the `predictions_format` field of the * BatchPredictionJob.output_config object: * `bigquery`: output includes a column named * `explanation`. The value is a struct that conforms to the Explanation object. * `jsonl`: The * JSON objects on each line include an additional entry keyed `explanation`. The value of the * entry is a JSON object that conforms to the Explanation object. * `csv`: Generating * explanations for CSV format is not supported. If this field is set to true, either the * Model.explanation_spec or explanation_spec must be populated. * @param generateExplanation generateExplanation or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setGenerateExplanation(java.lang.Boolean generateExplanation) { this.generateExplanation = generateExplanation; return this; } /** * Required. Input configuration of the instances on which predictions are performed. The schema * of any single instance may be specified via the Model's PredictSchemata's instance_schema_uri. 
* @return value or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJobInputConfig getInputConfig() { return inputConfig; } /** * Required. Input configuration of the instances on which predictions are performed. The schema * of any single instance may be specified via the Model's PredictSchemata's instance_schema_uri. * @param inputConfig inputConfig or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setInputConfig(GoogleCloudAiplatformV1BatchPredictionJobInputConfig inputConfig) { this.inputConfig = inputConfig; return this; } /** * Configuration for how to convert batch prediction input instances to the prediction instances * that are sent to the Model. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig getInstanceConfig() { return instanceConfig; } /** * Configuration for how to convert batch prediction input instances to the prediction instances * that are sent to the Model. * @param instanceConfig instanceConfig or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setInstanceConfig(GoogleCloudAiplatformV1BatchPredictionJobInstanceConfig instanceConfig) { this.instanceConfig = instanceConfig; return this; } /** * The labels with user-defined metadata to organize BatchPredictionJobs. Label keys and values * can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, * numeric characters, underscores and dashes. International characters are allowed. See * https://goo.gl/xmQnxf for more information and examples of labels. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getLabels() { return labels; } /** * The labels with user-defined metadata to organize BatchPredictionJobs. Label keys and values * can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, * numeric characters, underscores and dashes. 
International characters are allowed. See * https://goo.gl/xmQnxf for more information and examples of labels. * @param labels labels or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setLabels(java.util.Map<String, java.lang.String> labels) { this.labels = labels; return this; } /** * Immutable. Parameters configuring the batch behavior. Currently only applicable when * dedicated_resources are used (in other cases Vertex AI does the tuning itself). * @return value or {@code null} for none */ public GoogleCloudAiplatformV1ManualBatchTuningParameters getManualBatchTuningParameters() { return manualBatchTuningParameters; } /** * Immutable. Parameters configuring the batch behavior. Currently only applicable when * dedicated_resources are used (in other cases Vertex AI does the tuning itself). * @param manualBatchTuningParameters manualBatchTuningParameters or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setManualBatchTuningParameters(GoogleCloudAiplatformV1ManualBatchTuningParameters manualBatchTuningParameters) { this.manualBatchTuningParameters = manualBatchTuningParameters; return this; } /** * The name of the Model resource that produces the predictions via this job, must share the same * ancestor Location. Starting this job has no impact on any existing deployments of the Model and * their resources. Exactly one of model and unmanaged_container_model must be set. The model * resource name may contain version id or version alias to specify the version. Example: * `projects/{project}/locations/{location}/models/{model}@2` or * `projects/{project}/locations/{location}/models/{model}@golden` if no version is specified, the * default version will be deployed. The model resource could also be a publisher model. 
Example: * `publishers/{publisher}/models/{model}` or * `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` * @return value or {@code null} for none */ public java.lang.String getModel() { return model; } /** * The name of the Model resource that produces the predictions via this job, must share the same * ancestor Location. Starting this job has no impact on any existing deployments of the Model and * their resources. Exactly one of model and unmanaged_container_model must be set. The model * resource name may contain version id or version alias to specify the version. Example: * `projects/{project}/locations/{location}/models/{model}@2` or * `projects/{project}/locations/{location}/models/{model}@golden` if no version is specified, the * default version will be deployed. The model resource could also be a publisher model. Example: * `publishers/{publisher}/models/{model}` or * `projects/{project}/locations/{location}/publishers/{publisher}/models/{model}` * @param model model or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setModel(java.lang.String model) { this.model = model; return this; } /** * The parameters that govern the predictions. The schema of the parameters may be specified via * the Model's PredictSchemata's parameters_schema_uri. * @return value or {@code null} for none */ public java.lang.Object getModelParameters() { return modelParameters; } /** * The parameters that govern the predictions. The schema of the parameters may be specified via * the Model's PredictSchemata's parameters_schema_uri. * @param modelParameters modelParameters or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setModelParameters(java.lang.Object modelParameters) { this.modelParameters = modelParameters; return this; } /** * Output only. The version ID of the Model that produces the predictions via this job. 
* @return value or {@code null} for none */ public java.lang.String getModelVersionId() { return modelVersionId; } /** * Output only. The version ID of the Model that produces the predictions via this job. * @param modelVersionId modelVersionId or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setModelVersionId(java.lang.String modelVersionId) { this.modelVersionId = modelVersionId; return this; } /** * Output only. Resource name of the BatchPredictionJob. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * Output only. Resource name of the BatchPredictionJob. * @param name name or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setName(java.lang.String name) { this.name = name; return this; } /** * Required. The Configuration specifying where output predictions should be written. The schema * of any single prediction may be specified as a concatenation of Model's PredictSchemata's * instance_schema_uri and prediction_schema_uri. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJobOutputConfig getOutputConfig() { return outputConfig; } /** * Required. The Configuration specifying where output predictions should be written. The schema * of any single prediction may be specified as a concatenation of Model's PredictSchemata's * instance_schema_uri and prediction_schema_uri. * @param outputConfig outputConfig or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setOutputConfig(GoogleCloudAiplatformV1BatchPredictionJobOutputConfig outputConfig) { this.outputConfig = outputConfig; return this; } /** * Output only. Information further describing the output of this job. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJobOutputInfo getOutputInfo() { return outputInfo; } /** * Output only. Information further describing the output of this job. 
* @param outputInfo outputInfo or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setOutputInfo(GoogleCloudAiplatformV1BatchPredictionJobOutputInfo outputInfo) { this.outputInfo = outputInfo; return this; } /** * Output only. Partial failures encountered. For example, single files that can't be read. This * field never exceeds 20 entries. Status details fields contain standard Google Cloud error * details. * @return value or {@code null} for none */ public java.util.List<GoogleRpcStatus> getPartialFailures() { return partialFailures; } /** * Output only. Partial failures encountered. For example, single files that can't be read. This * field never exceeds 20 entries. Status details fields contain standard Google Cloud error * details. * @param partialFailures partialFailures or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setPartialFailures(java.util.List<GoogleRpcStatus> partialFailures) { this.partialFailures = partialFailures; return this; } /** * Output only. Information about resources that had been consumed by this job. Provided in real * time at best effort basis, as well as a final value once the job completes. Note: This field * currently may be not populated for batch predictions that use AutoML Models. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1ResourcesConsumed getResourcesConsumed() { return resourcesConsumed; } /** * Output only. Information about resources that had been consumed by this job. Provided in real * time at best effort basis, as well as a final value once the job completes. Note: This field * currently may be not populated for batch predictions that use AutoML Models. * @param resourcesConsumed resourcesConsumed or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setResourcesConsumed(GoogleCloudAiplatformV1ResourcesConsumed resourcesConsumed) { this.resourcesConsumed = resourcesConsumed; return this; } /** * Output only. 
Reserved for future use. * @return value or {@code null} for none */ public java.lang.Boolean getSatisfiesPzi() { return satisfiesPzi; } /** * Output only. Reserved for future use. * @param satisfiesPzi satisfiesPzi or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setSatisfiesPzi(java.lang.Boolean satisfiesPzi) { this.satisfiesPzi = satisfiesPzi; return this; } /** * Output only. Reserved for future use. * @return value or {@code null} for none */ public java.lang.Boolean getSatisfiesPzs() { return satisfiesPzs; } /** * Output only. Reserved for future use. * @param satisfiesPzs satisfiesPzs or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setSatisfiesPzs(java.lang.Boolean satisfiesPzs) { this.satisfiesPzs = satisfiesPzs; return this; } /** * The service account that the DeployedModel's container runs as. If not specified, a system * generated one will be used, which has minimal permissions and the custom container, if used, * may not have enough permission to access other Google Cloud resources. Users deploying the * Model must have the `iam.serviceAccounts.actAs` permission on this service account. * @return value or {@code null} for none */ public java.lang.String getServiceAccount() { return serviceAccount; } /** * The service account that the DeployedModel's container runs as. If not specified, a system * generated one will be used, which has minimal permissions and the custom container, if used, * may not have enough permission to access other Google Cloud resources. Users deploying the * Model must have the `iam.serviceAccounts.actAs` permission on this service account. * @param serviceAccount serviceAccount or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setServiceAccount(java.lang.String serviceAccount) { this.serviceAccount = serviceAccount; return this; } /** * Output only. Time when the BatchPredictionJob for the first time entered the * `JOB_STATE_RUNNING` state. 
* @return value or {@code null} for none */ public String getStartTime() { return startTime; } /** * Output only. Time when the BatchPredictionJob for the first time entered the * `JOB_STATE_RUNNING` state. * @param startTime startTime or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setStartTime(String startTime) { this.startTime = startTime; return this; } /** * Output only. The detailed state of the job. * @return value or {@code null} for none */ public java.lang.String getState() { return state; } /** * Output only. The detailed state of the job. * @param state state or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setState(java.lang.String state) { this.state = state; return this; } /** * Contains model information necessary to perform batch prediction without requiring uploading to * model registry. Exactly one of model and unmanaged_container_model must be set. * @return value or {@code null} for none */ public GoogleCloudAiplatformV1UnmanagedContainerModel getUnmanagedContainerModel() { return unmanagedContainerModel; } /** * Contains model information necessary to perform batch prediction without requiring uploading to * model registry. Exactly one of model and unmanaged_container_model must be set. * @param unmanagedContainerModel unmanagedContainerModel or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setUnmanagedContainerModel(GoogleCloudAiplatformV1UnmanagedContainerModel unmanagedContainerModel) { this.unmanagedContainerModel = unmanagedContainerModel; return this; } /** * Output only. Time when the BatchPredictionJob was most recently updated. * @return value or {@code null} for none */ public String getUpdateTime() { return updateTime; } /** * Output only. Time when the BatchPredictionJob was most recently updated. 
* @param updateTime updateTime or {@code null} for none */ public GoogleCloudAiplatformV1BatchPredictionJob setUpdateTime(String updateTime) { this.updateTime = updateTime; return this; } @Override public GoogleCloudAiplatformV1BatchPredictionJob set(String fieldName, Object value) { return (GoogleCloudAiplatformV1BatchPredictionJob) super.set(fieldName, value); } @Override public GoogleCloudAiplatformV1BatchPredictionJob clone() { return (GoogleCloudAiplatformV1BatchPredictionJob) super.clone(); } }
googleapis/google-cloud-java
35,294
java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListIndexesResponse.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler. DO NOT EDIT!
// (Hand edits will be lost on the next protoc run; change the .proto instead.)
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;

/**
 * Response message for ListIndexes: a page of {@code Index} resources plus an
 * optional continuation token.
 *
 * Protobuf type {@code google.cloud.visionai.v1.ListIndexesResponse}
 */
public final class ListIndexesResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListIndexesResponse)
    ListIndexesResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListIndexesResponse.newBuilder() to construct.
  private ListIndexesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: empty repeated field, empty token.
  private ListIndexesResponse() {
    indexes_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListIndexesResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_ListIndexesResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.visionai.v1.WarehouseProto
        .internal_static_google_cloud_visionai_v1_ListIndexesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.visionai.v1.ListIndexesResponse.class,
            com.google.cloud.visionai.v1.ListIndexesResponse.Builder.class);
  }

  public static final int INDEXES_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.visionai.v1.Index> indexes_;

  /**
   * The indexes under the specified corpus.
   * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.visionai.v1.Index> getIndexesList() {
    return indexes_;
  }

  /**
   * The indexes under the specified corpus.
   * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.visionai.v1.IndexOrBuilder>
      getIndexesOrBuilderList() {
    return indexes_;
  }

  /**
   * The indexes under the specified corpus.
   * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
   */
  @java.lang.Override
  public int getIndexesCount() {
    return indexes_.size();
  }

  /**
   * The indexes under the specified corpus.
   * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.Index getIndexes(int index) {
    return indexes_.get(index);
  }

  /**
   * The indexes under the specified corpus.
   * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
   */
  @java.lang.Override
  public com.google.cloud.visionai.v1.IndexOrBuilder getIndexesOrBuilder(int index) {
    return indexes_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a ByteString (raw wire form) or a String (after first decode);
  // getNextPageToken() caches the decoded String back into this field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   * A token, which can be sent as {@code page_token} to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * {@code string next_page_token = 2;}
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s; // cache decoded form for subsequent calls
      return s;
    }
  }

  /**
   * A token, which can be sent as {@code page_token} to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * {@code string next_page_token = 2;}
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b; // cache encoded form for subsequent calls
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // -1 = not computed yet; 0 = not initialized; 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes fields in field-number order, skipping default values.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < indexes_.size(); i++) {
      output.writeMessage(1, indexes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize; // memoized: -1 means not yet computed
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < indexes_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, indexes_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.visionai.v1.ListIndexesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.ListIndexesResponse other =
        (com.google.cloud.visionai.v1.ListIndexesResponse) obj;

    if (!getIndexesList().equals(other.getIndexesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode; // 0 is the "not computed" sentinel
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIndexesCount() > 0) {
      hash = (37 * hash) + INDEXES_FIELD_NUMBER;
      hash = (53 * hash) + getIndexesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Static parsing entry points (delegate to PARSER). ----

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.visionai.v1.ListIndexesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * Mutable builder for {@code ListIndexesResponse}.
   *
   * Response message for ListIndexes.
   *
   * Protobuf type {@code google.cloud.visionai.v1.ListIndexesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListIndexesResponse)
      com.google.cloud.visionai.v1.ListIndexesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ListIndexesResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ListIndexesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.visionai.v1.ListIndexesResponse.class,
              com.google.cloud.visionai.v1.ListIndexesResponse.Builder.class);
    }

    // Construct using com.google.cloud.visionai.v1.ListIndexesResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (indexesBuilder_ == null) {
        indexes_ = java.util.Collections.emptyList();
      } else {
        indexes_ = null;
        indexesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001); // clear "indexes present" bit
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.visionai.v1.WarehouseProto
          .internal_static_google_cloud_visionai_v1_ListIndexesResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ListIndexesResponse getDefaultInstanceForType() {
      return com.google.cloud.visionai.v1.ListIndexesResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ListIndexesResponse build() {
      com.google.cloud.visionai.v1.ListIndexesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.visionai.v1.ListIndexesResponse buildPartial() {
      com.google.cloud.visionai.v1.ListIndexesResponse result =
          new com.google.cloud.visionai.v1.ListIndexesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field; freezes the list if built without a field builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.visionai.v1.ListIndexesResponse result) {
      if (indexesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          indexes_ = java.util.Collections.unmodifiableList(indexes_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.indexes_ = indexes_;
      } else {
        result.indexes_ = indexesBuilder_.build();
      }
    }

    // Transfers singular fields guarded by their bitField0_ bits.
    private void buildPartial0(com.google.cloud.visionai.v1.ListIndexesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.visionai.v1.ListIndexesResponse) {
        return mergeFrom((com.google.cloud.visionai.v1.ListIndexesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.visionai.v1.ListIndexesResponse other) {
      if (other == com.google.cloud.visionai.v1.ListIndexesResponse.getDefaultInstance())
        return this;
      if (indexesBuilder_ == null) {
        if (!other.indexes_.isEmpty()) {
          if (indexes_.isEmpty()) {
            // Share the other message's immutable list instead of copying.
            indexes_ = other.indexes_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureIndexesIsMutable();
            indexes_.addAll(other.indexes_);
          }
          onChanged();
        }
      } else {
        if (!other.indexes_.isEmpty()) {
          if (indexesBuilder_.isEmpty()) {
            // Drop the empty field builder and adopt the other's list directly.
            indexesBuilder_.dispose();
            indexesBuilder_ = null;
            indexes_ = other.indexes_;
            bitField0_ = (bitField0_ & ~0x00000001);
            indexesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getIndexesFieldBuilder()
                    : null;
          } else {
            indexesBuilder_.addAllMessages(other.indexes_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse: dispatches on each wire tag until EOF (tag 0).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.visionai.v1.Index m =
                    input.readMessage(
                        com.google.cloud.visionai.v1.Index.parser(), extensionRegistry);
                if (indexesBuilder_ == null) {
                  ensureIndexesIsMutable();
                  indexes_.add(m);
                } else {
                  indexesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // bit 0x1 = indexes_ list is mutable/owned; bit 0x2 = nextPageToken_ was set.
    private int bitField0_;

    private java.util.List<com.google.cloud.visionai.v1.Index> indexes_ =
        java.util.Collections.emptyList();

    private void ensureIndexesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        indexes_ = new java.util.ArrayList<com.google.cloud.visionai.v1.Index>(indexes_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily created; once non-null it owns the repeated field's state.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.Index,
            com.google.cloud.visionai.v1.Index.Builder,
            com.google.cloud.visionai.v1.IndexOrBuilder>
        indexesBuilder_;

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public java.util.List<com.google.cloud.visionai.v1.Index> getIndexesList() {
      if (indexesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(indexes_);
      } else {
        return indexesBuilder_.getMessageList();
      }
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public int getIndexesCount() {
      if (indexesBuilder_ == null) {
        return indexes_.size();
      } else {
        return indexesBuilder_.getCount();
      }
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public com.google.cloud.visionai.v1.Index getIndexes(int index) {
      if (indexesBuilder_ == null) {
        return indexes_.get(index);
      } else {
        return indexesBuilder_.getMessage(index);
      }
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder setIndexes(int index, com.google.cloud.visionai.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.set(index, value);
        onChanged();
      } else {
        indexesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder setIndexes(
        int index, com.google.cloud.visionai.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.set(index, builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder addIndexes(com.google.cloud.visionai.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.add(value);
        onChanged();
      } else {
        indexesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder addIndexes(int index, com.google.cloud.visionai.v1.Index value) {
      if (indexesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureIndexesIsMutable();
        indexes_.add(index, value);
        onChanged();
      } else {
        indexesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder addIndexes(com.google.cloud.visionai.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.add(builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder addIndexes(
        int index, com.google.cloud.visionai.v1.Index.Builder builderForValue) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.add(index, builderForValue.build());
        onChanged();
      } else {
        indexesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder addAllIndexes(
        java.lang.Iterable<? extends com.google.cloud.visionai.v1.Index> values) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, indexes_);
        onChanged();
      } else {
        indexesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder clearIndexes() {
      if (indexesBuilder_ == null) {
        indexes_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        indexesBuilder_.clear();
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public Builder removeIndexes(int index) {
      if (indexesBuilder_ == null) {
        ensureIndexesIsMutable();
        indexes_.remove(index);
        onChanged();
      } else {
        indexesBuilder_.remove(index);
      }
      return this;
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public com.google.cloud.visionai.v1.Index.Builder getIndexesBuilder(int index) {
      return getIndexesFieldBuilder().getBuilder(index);
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public com.google.cloud.visionai.v1.IndexOrBuilder getIndexesOrBuilder(int index) {
      if (indexesBuilder_ == null) {
        return indexes_.get(index);
      } else {
        return indexesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public java.util.List<? extends com.google.cloud.visionai.v1.IndexOrBuilder>
        getIndexesOrBuilderList() {
      if (indexesBuilder_ != null) {
        return indexesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(indexes_);
      }
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public com.google.cloud.visionai.v1.Index.Builder addIndexesBuilder() {
      return getIndexesFieldBuilder()
          .addBuilder(com.google.cloud.visionai.v1.Index.getDefaultInstance());
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public com.google.cloud.visionai.v1.Index.Builder addIndexesBuilder(int index) {
      return getIndexesFieldBuilder()
          .addBuilder(index, com.google.cloud.visionai.v1.Index.getDefaultInstance());
    }

    /**
     * The indexes under the specified corpus.
     * {@code repeated .google.cloud.visionai.v1.Index indexes = 1;}
     */
    public java.util.List<com.google.cloud.visionai.v1.Index.Builder> getIndexesBuilderList() {
      return getIndexesFieldBuilder().getBuilderList();
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.visionai.v1.Index,
            com.google.cloud.visionai.v1.Index.Builder,
            com.google.cloud.visionai.v1.IndexOrBuilder>
        getIndexesFieldBuilder() {
      if (indexesBuilder_ == null) {
        indexesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.visionai.v1.Index,
                com.google.cloud.visionai.v1.Index.Builder,
                com.google.cloud.visionai.v1.IndexOrBuilder>(
                indexes_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        indexes_ = null; // ownership transferred to the field builder
      }
      return indexesBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     * A token, which can be sent as {@code page_token} to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * {@code string next_page_token = 2;}
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s; // cache decoded form
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     * A token, which can be sent as {@code page_token} to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * {@code string next_page_token = 2;}
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b; // cache encoded form
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     * A token, which can be sent as {@code page_token} to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * {@code string next_page_token = 2;}
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * A token, which can be sent as {@code page_token} to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * {@code string next_page_token = 2;}
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     * A token, which can be sent as {@code page_token} to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * {@code string next_page_token = 2;}
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListIndexesResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListIndexesResponse)
  private static final com.google.cloud.visionai.v1.ListIndexesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListIndexesResponse();
  }

  public static com.google.cloud.visionai.v1.ListIndexesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<ListIndexesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListIndexesResponse>() {
        @java.lang.Override
        public ListIndexesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListIndexesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListIndexesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.visionai.v1.ListIndexesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
googleapis/google-api-java-client-services
35,527
clients/google-api-services-cloudtrace/v1/1.30.1/com/google/api/services/cloudtrace/v1/CloudTrace.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.cloudtrace.v1; /** * Service definition for CloudTrace (v1). * * <p> * Sends application trace data to Cloud Trace for viewing. Trace data is collected for all App Engine applications by default. Trace data from other applications can be provided using this API. This library is used to interact with the Cloud Trace API directly. If you are looking to instrument your application for Cloud Trace, we recommend using OpenCensus. * </p> * * <p> * For more information about this service, see the * <a href="https://cloud.google.com/trace" target="_blank">API Documentation</a> * </p> * * <p> * This service uses {@link CloudTraceRequestInitializer} to initialize global parameters via its * {@link Builder}. * </p> * * @since 1.3 * @author Google, Inc. */ @SuppressWarnings("javadoc") public class CloudTrace extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient { // Note: Leave this static initializer at the top of the file. static { com.google.api.client.util.Preconditions.checkState( com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 && com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15, "You are currently running with version %s of google-api-client. 
" + "You need at least version 1.15 of google-api-client to run version " + "1.30.10 of the Cloud Trace API library.", com.google.api.client.googleapis.GoogleUtils.VERSION); } /** * The default encoded root URL of the service. This is determined when the library is generated * and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_ROOT_URL = "https://cloudtrace.googleapis.com/"; /** * The default encoded service path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.7 */ public static final String DEFAULT_SERVICE_PATH = ""; /** * The default encoded batch path of the service. This is determined when the library is * generated and normally should not be changed. * * @since 1.23 */ public static final String DEFAULT_BATCH_PATH = "batch"; /** * The default encoded base URL of the service. This is determined when the library is generated * and normally should not be changed. */ public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH; /** * Constructor. * * <p> * Use {@link Builder} if you need to specify any of the optional parameters. 
* </p> * * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public CloudTrace(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { this(new Builder(transport, jsonFactory, httpRequestInitializer)); } /** * @param builder builder */ CloudTrace(Builder builder) { super(builder); } @Override protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException { super.initialize(httpClientRequest); } /** * An accessor for creating requests from the Projects collection. * * <p>The typical use is:</p> * <pre> * {@code CloudTrace cloudtrace = new CloudTrace(...);} * {@code CloudTrace.Projects.List request = cloudtrace.projects().list(parameters ...)} * </pre> * * @return the resource collection */ public Projects projects() { return new Projects(); } /** * The "projects" collection of methods. */ public class Projects { /** * Sends new traces to Cloud Trace or updates existing traces. 
If the ID of a trace that you send * matches that of an existing trace, any fields in the existing trace and its spans are overwritten * by the provided values, and any new fields provided are merged with the existing trace data. If * the ID does not match, a new trace is created. * * Create a request for the method "projects.patchTraces". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link PatchTraces#execute()} method to invoke the remote operation. * * @param projectId Required. ID of the Cloud project where the trace data is stored. * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces} * @return the request */ public PatchTraces patchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) throws java.io.IOException { PatchTraces result = new PatchTraces(projectId, content); initialize(result); return result; } public class PatchTraces extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Empty> { private static final String REST_PATH = "v1/projects/{projectId}/traces"; /** * Sends new traces to Cloud Trace or updates existing traces. If the ID of a trace that you send * matches that of an existing trace, any fields in the existing trace and its spans are * overwritten by the provided values, and any new fields provided are merged with the existing * trace data. If the ID does not match, a new trace is created. * * Create a request for the method "projects.patchTraces". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link PatchTraces#execute()} method to invoke the remote * operation. <p> {@link * PatchTraces#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param projectId Required. 
ID of the Cloud project where the trace data is stored. * @param content the {@link com.google.api.services.cloudtrace.v1.model.Traces} * @since 1.13 */ protected PatchTraces(java.lang.String projectId, com.google.api.services.cloudtrace.v1.model.Traces content) { super(CloudTrace.this, "PATCH", REST_PATH, content, com.google.api.services.cloudtrace.v1.model.Empty.class); this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified."); } @Override public PatchTraces set$Xgafv(java.lang.String $Xgafv) { return (PatchTraces) super.set$Xgafv($Xgafv); } @Override public PatchTraces setAccessToken(java.lang.String accessToken) { return (PatchTraces) super.setAccessToken(accessToken); } @Override public PatchTraces setAlt(java.lang.String alt) { return (PatchTraces) super.setAlt(alt); } @Override public PatchTraces setCallback(java.lang.String callback) { return (PatchTraces) super.setCallback(callback); } @Override public PatchTraces setFields(java.lang.String fields) { return (PatchTraces) super.setFields(fields); } @Override public PatchTraces setKey(java.lang.String key) { return (PatchTraces) super.setKey(key); } @Override public PatchTraces setOauthToken(java.lang.String oauthToken) { return (PatchTraces) super.setOauthToken(oauthToken); } @Override public PatchTraces setPrettyPrint(java.lang.Boolean prettyPrint) { return (PatchTraces) super.setPrettyPrint(prettyPrint); } @Override public PatchTraces setQuotaUser(java.lang.String quotaUser) { return (PatchTraces) super.setQuotaUser(quotaUser); } @Override public PatchTraces setUploadType(java.lang.String uploadType) { return (PatchTraces) super.setUploadType(uploadType); } @Override public PatchTraces setUploadProtocol(java.lang.String uploadProtocol) { return (PatchTraces) super.setUploadProtocol(uploadProtocol); } /** Required. ID of the Cloud project where the trace data is stored. 
*/ @com.google.api.client.util.Key private java.lang.String projectId; /** Required. ID of the Cloud project where the trace data is stored. */ public java.lang.String getProjectId() { return projectId; } /** Required. ID of the Cloud project where the trace data is stored. */ public PatchTraces setProjectId(java.lang.String projectId) { this.projectId = projectId; return this; } @Override public PatchTraces set(String parameterName, Object value) { return (PatchTraces) super.set(parameterName, value); } } /** * An accessor for creating requests from the Traces collection. * * <p>The typical use is:</p> * <pre> * {@code CloudTrace cloudtrace = new CloudTrace(...);} * {@code CloudTrace.Traces.List request = cloudtrace.traces().list(parameters ...)} * </pre> * * @return the resource collection */ public Traces traces() { return new Traces(); } /** * The "traces" collection of methods. */ public class Traces { /** * Gets a single trace by its ID. * * Create a request for the method "traces.get". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link Get#execute()} method to invoke the remote operation. * * @param projectId Required. ID of the Cloud project where the trace data is stored. * @param traceId Required. ID of the trace to return. * @return the request */ public Get get(java.lang.String projectId, java.lang.String traceId) throws java.io.IOException { Get result = new Get(projectId, traceId); initialize(result); return result; } public class Get extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.Trace> { private static final String REST_PATH = "v1/projects/{projectId}/traces/{traceId}"; /** * Gets a single trace by its ID. * * Create a request for the method "traces.get". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link Get#execute()} method to invoke the remote operation. 
<p> * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param projectId Required. ID of the Cloud project where the trace data is stored. * @param traceId Required. ID of the trace to return. * @since 1.13 */ protected Get(java.lang.String projectId, java.lang.String traceId) { super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.Trace.class); this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified."); this.traceId = com.google.api.client.util.Preconditions.checkNotNull(traceId, "Required parameter traceId must be specified."); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public Get set$Xgafv(java.lang.String $Xgafv) { return (Get) super.set$Xgafv($Xgafv); } @Override public Get setAccessToken(java.lang.String accessToken) { return (Get) super.setAccessToken(accessToken); } @Override public Get setAlt(java.lang.String alt) { return (Get) super.setAlt(alt); } @Override public Get setCallback(java.lang.String callback) { return (Get) super.setCallback(callback); } @Override public Get setFields(java.lang.String fields) { return (Get) super.setFields(fields); } @Override public Get setKey(java.lang.String key) { return (Get) super.setKey(key); } @Override public Get setOauthToken(java.lang.String oauthToken) { return (Get) super.setOauthToken(oauthToken); } @Override public Get setPrettyPrint(java.lang.Boolean prettyPrint) { return (Get) super.setPrettyPrint(prettyPrint); } @Override public Get setQuotaUser(java.lang.String quotaUser) { return (Get) 
super.setQuotaUser(quotaUser); } @Override public Get setUploadType(java.lang.String uploadType) { return (Get) super.setUploadType(uploadType); } @Override public Get setUploadProtocol(java.lang.String uploadProtocol) { return (Get) super.setUploadProtocol(uploadProtocol); } /** Required. ID of the Cloud project where the trace data is stored. */ @com.google.api.client.util.Key private java.lang.String projectId; /** Required. ID of the Cloud project where the trace data is stored. */ public java.lang.String getProjectId() { return projectId; } /** Required. ID of the Cloud project where the trace data is stored. */ public Get setProjectId(java.lang.String projectId) { this.projectId = projectId; return this; } /** Required. ID of the trace to return. */ @com.google.api.client.util.Key private java.lang.String traceId; /** Required. ID of the trace to return. */ public java.lang.String getTraceId() { return traceId; } /** Required. ID of the trace to return. */ public Get setTraceId(java.lang.String traceId) { this.traceId = traceId; return this; } @Override public Get set(String parameterName, Object value) { return (Get) super.set(parameterName, value); } } /** * Returns of a list of traces that match the specified filter conditions. * * Create a request for the method "traces.list". * * This request holds the parameters needed by the cloudtrace server. After setting any optional * parameters, call the {@link List#execute()} method to invoke the remote operation. * * @param projectId Required. ID of the Cloud project where the trace data is stored. 
* @return the request */ public List list(java.lang.String projectId) throws java.io.IOException { List result = new List(projectId); initialize(result); return result; } public class List extends CloudTraceRequest<com.google.api.services.cloudtrace.v1.model.ListTracesResponse> { private static final String REST_PATH = "v1/projects/{projectId}/traces"; /** * Returns of a list of traces that match the specified filter conditions. * * Create a request for the method "traces.list". * * This request holds the parameters needed by the the cloudtrace server. After setting any * optional parameters, call the {@link List#execute()} method to invoke the remote operation. <p> * {@link List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} * must be called to initialize this instance immediately after invoking the constructor. </p> * * @param projectId Required. ID of the Cloud project where the trace data is stored. * @since 1.13 */ protected List(java.lang.String projectId) { super(CloudTrace.this, "GET", REST_PATH, null, com.google.api.services.cloudtrace.v1.model.ListTracesResponse.class); this.projectId = com.google.api.client.util.Preconditions.checkNotNull(projectId, "Required parameter projectId must be specified."); } @Override public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException { return super.executeUsingHead(); } @Override public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException { return super.buildHttpRequestUsingHead(); } @Override public List set$Xgafv(java.lang.String $Xgafv) { return (List) super.set$Xgafv($Xgafv); } @Override public List setAccessToken(java.lang.String accessToken) { return (List) super.setAccessToken(accessToken); } @Override public List setAlt(java.lang.String alt) { return (List) super.setAlt(alt); } @Override public List setCallback(java.lang.String callback) { return (List) super.setCallback(callback); } @Override public 
List setFields(java.lang.String fields) { return (List) super.setFields(fields); } @Override public List setKey(java.lang.String key) { return (List) super.setKey(key); } @Override public List setOauthToken(java.lang.String oauthToken) { return (List) super.setOauthToken(oauthToken); } @Override public List setPrettyPrint(java.lang.Boolean prettyPrint) { return (List) super.setPrettyPrint(prettyPrint); } @Override public List setQuotaUser(java.lang.String quotaUser) { return (List) super.setQuotaUser(quotaUser); } @Override public List setUploadType(java.lang.String uploadType) { return (List) super.setUploadType(uploadType); } @Override public List setUploadProtocol(java.lang.String uploadProtocol) { return (List) super.setUploadProtocol(uploadProtocol); } /** Required. ID of the Cloud project where the trace data is stored. */ @com.google.api.client.util.Key private java.lang.String projectId; /** Required. ID of the Cloud project where the trace data is stored. */ public java.lang.String getProjectId() { return projectId; } /** Required. ID of the Cloud project where the trace data is stored. */ public List setProjectId(java.lang.String projectId) { this.projectId = projectId; return this; } /** * End of the time interval (inclusive) during which the trace data was collected from the * application. */ @com.google.api.client.util.Key private String endTime; /** End of the time interval (inclusive) during which the trace data was collected from the application. */ public String getEndTime() { return endTime; } /** * End of the time interval (inclusive) during which the trace data was collected from the * application. */ public List setEndTime(String endTime) { this.endTime = endTime; return this; } /** * Optional. A filter against labels for the request. By default, searches use prefix * matching. To specify exact match, prepend a plus symbol (`+`) to the search term. * Multiple terms are ANDed. 
Syntax: * `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces * where any root span starts with `NAME_PREFIX`. * `+root:NAME` or `+NAME`: Return traces * where any root span's name is exactly `NAME`. * `span:NAME_PREFIX`: Return traces where * any span starts with `NAME_PREFIX`. * `+span:NAME`: Return traces where any span's name * is exactly `NAME`. * `latency:DURATION`: Return traces whose overall latency is greater * or equal to than `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), * and seconds (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose * overall latency is greater than or equal to 24 milliseconds. * `label:LABEL_KEY`: Return * all traces containing the specified label key (exact match, case-sensitive) regardless of * the key:value pair's value (including empty values). * `LABEL_KEY:VALUE_PREFIX`: Return * all traces containing the specified label key (exact match, case-sensitive) whose value * starts with `VALUE_PREFIX`. Both a key and a value must be specified. * * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the * specified text. Both a key and a value must be specified. * `method:VALUE`: Equivalent to * `/http/method:VALUE`. * `url:VALUE`: Equivalent to `/http/url:VALUE`. */ @com.google.api.client.util.Key private java.lang.String filter; /** Optional. A filter against labels for the request. By default, searches use prefix matching. To specify exact match, prepend a plus symbol (`+`) to the search term. Multiple terms are ANDed. Syntax: * `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces where any root span starts with `NAME_PREFIX`. * `+root:NAME` or `+NAME`: Return traces where any root span's name is exactly `NAME`. * `span:NAME_PREFIX`: Return traces where any span starts with `NAME_PREFIX`. * `+span:NAME`: Return traces where any span's name is exactly `NAME`. * `latency:DURATION`: Return traces whose overall latency is greater or equal to than `DURATION`. 
Accepted units are nanoseconds (`ns`), milliseconds (`ms`), and seconds (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose overall latency is greater than or equal to 24 milliseconds. * `label:LABEL_KEY`: Return all traces containing the specified label key (exact match, case- sensitive) regardless of the key:value pair's value (including empty values). * `LABEL_KEY:VALUE_PREFIX`: Return all traces containing the specified label key (exact match, case- sensitive) whose value starts with `VALUE_PREFIX`. Both a key and a value must be specified. * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the specified text. Both a key and a value must be specified. * `method:VALUE`: Equivalent to `/http/method:VALUE`. * `url:VALUE`: Equivalent to `/http/url:VALUE`. */ public java.lang.String getFilter() { return filter; } /** * Optional. A filter against labels for the request. By default, searches use prefix * matching. To specify exact match, prepend a plus symbol (`+`) to the search term. * Multiple terms are ANDed. Syntax: * `root:NAME_PREFIX` or `NAME_PREFIX`: Return traces * where any root span starts with `NAME_PREFIX`. * `+root:NAME` or `+NAME`: Return traces * where any root span's name is exactly `NAME`. * `span:NAME_PREFIX`: Return traces where * any span starts with `NAME_PREFIX`. * `+span:NAME`: Return traces where any span's name * is exactly `NAME`. * `latency:DURATION`: Return traces whose overall latency is greater * or equal to than `DURATION`. Accepted units are nanoseconds (`ns`), milliseconds (`ms`), * and seconds (`s`). Default is `ms`. For example, `latency:24ms` returns traces whose * overall latency is greater than or equal to 24 milliseconds. * `label:LABEL_KEY`: Return * all traces containing the specified label key (exact match, case-sensitive) regardless of * the key:value pair's value (including empty values). 
* `LABEL_KEY:VALUE_PREFIX`: Return * all traces containing the specified label key (exact match, case-sensitive) whose value * starts with `VALUE_PREFIX`. Both a key and a value must be specified. * * `+LABEL_KEY:VALUE`: Return all traces containing a key:value pair exactly matching the * specified text. Both a key and a value must be specified. * `method:VALUE`: Equivalent to * `/http/method:VALUE`. * `url:VALUE`: Equivalent to `/http/url:VALUE`. */ public List setFilter(java.lang.String filter) { this.filter = filter; return this; } /** * Optional. Field used to sort the returned traces. Can be one of the following: * * `trace_id` * `name` (`name` field of root span in the trace) * `duration` (difference * between `end_time` and `start_time` fields of the root span) * `start` (`start_time` * field of the root span) Descending order can be specified by appending `desc` to the sort * field (for example, `name desc`). Only one sort field is permitted. */ @com.google.api.client.util.Key private java.lang.String orderBy; /** Optional. Field used to sort the returned traces. Can be one of the following: * `trace_id` * `name` (`name` field of root span in the trace) * `duration` (difference between `end_time` and `start_time` fields of the root span) * `start` (`start_time` field of the root span) Descending order can be specified by appending `desc` to the sort field (for example, `name desc`). Only one sort field is permitted. */ public java.lang.String getOrderBy() { return orderBy; } /** * Optional. Field used to sort the returned traces. Can be one of the following: * * `trace_id` * `name` (`name` field of root span in the trace) * `duration` (difference * between `end_time` and `start_time` fields of the root span) * `start` (`start_time` * field of the root span) Descending order can be specified by appending `desc` to the sort * field (for example, `name desc`). Only one sort field is permitted. 
*/ public List setOrderBy(java.lang.String orderBy) { this.orderBy = orderBy; return this; } /** * Optional. Maximum number of traces to return. If not specified or <= 0, the * implementation selects a reasonable value. The implementation may return fewer traces * than the requested page size. */ @com.google.api.client.util.Key private java.lang.Integer pageSize; /** Optional. Maximum number of traces to return. If not specified or <= 0, the implementation selects a reasonable value. The implementation may return fewer traces than the requested page size. */ public java.lang.Integer getPageSize() { return pageSize; } /** * Optional. Maximum number of traces to return. If not specified or <= 0, the * implementation selects a reasonable value. The implementation may return fewer traces * than the requested page size. */ public List setPageSize(java.lang.Integer pageSize) { this.pageSize = pageSize; return this; } /** * Token identifying the page of results to return. If provided, use the value of the * `next_page_token` field from a previous request. */ @com.google.api.client.util.Key private java.lang.String pageToken; /** Token identifying the page of results to return. If provided, use the value of the `next_page_token` field from a previous request. */ public java.lang.String getPageToken() { return pageToken; } /** * Token identifying the page of results to return. If provided, use the value of the * `next_page_token` field from a previous request. */ public List setPageToken(java.lang.String pageToken) { this.pageToken = pageToken; return this; } /** * Start of the time interval (inclusive) during which the trace data was collected from the * application. */ @com.google.api.client.util.Key private String startTime; /** Start of the time interval (inclusive) during which the trace data was collected from the application. 
*/ public String getStartTime() { return startTime; } /** * Start of the time interval (inclusive) during which the trace data was collected from the * application. */ public List setStartTime(String startTime) { this.startTime = startTime; return this; } /** Optional. Type of data returned for traces in the list. Default is `MINIMAL`. */ @com.google.api.client.util.Key private java.lang.String view; /** Optional. Type of data returned for traces in the list. Default is `MINIMAL`. */ public java.lang.String getView() { return view; } /** Optional. Type of data returned for traces in the list. Default is `MINIMAL`. */ public List setView(java.lang.String view) { this.view = view; return this; } @Override public List set(String parameterName, Object value) { return (List) super.set(parameterName, value); } } } } /** * Builder for {@link CloudTrace}. * * <p> * Implementation is not thread-safe. * </p> * * @since 1.3.0 */ public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder { /** * Returns an instance of a new builder. 
* * @param transport HTTP transport, which should normally be: * <ul> * <li>Google App Engine: * {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li> * <li>Android: {@code newCompatibleTransport} from * {@code com.google.api.client.extensions.android.http.AndroidHttp}</li> * <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()} * </li> * </ul> * @param jsonFactory JSON factory, which may be: * <ul> * <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li> * <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li> * <li>Android Honeycomb or higher: * {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li> * </ul> * @param httpRequestInitializer HTTP request initializer or {@code null} for none * @since 1.7 */ public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory, com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { super( transport, jsonFactory, DEFAULT_ROOT_URL, DEFAULT_SERVICE_PATH, httpRequestInitializer, false); setBatchPath(DEFAULT_BATCH_PATH); } /** Builds a new instance of {@link CloudTrace}. 
*/ @Override public CloudTrace build() { return new CloudTrace(this); } @Override public Builder setRootUrl(String rootUrl) { return (Builder) super.setRootUrl(rootUrl); } @Override public Builder setServicePath(String servicePath) { return (Builder) super.setServicePath(servicePath); } @Override public Builder setBatchPath(String batchPath) { return (Builder) super.setBatchPath(batchPath); } @Override public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) { return (Builder) super.setHttpRequestInitializer(httpRequestInitializer); } @Override public Builder setApplicationName(String applicationName) { return (Builder) super.setApplicationName(applicationName); } @Override public Builder setSuppressPatternChecks(boolean suppressPatternChecks) { return (Builder) super.setSuppressPatternChecks(suppressPatternChecks); } @Override public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) { return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks); } @Override public Builder setSuppressAllChecks(boolean suppressAllChecks) { return (Builder) super.setSuppressAllChecks(suppressAllChecks); } /** * Set the {@link CloudTraceRequestInitializer}. * * @since 1.12 */ public Builder setCloudTraceRequestInitializer( CloudTraceRequestInitializer cloudtraceRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(cloudtraceRequestInitializer); } @Override public Builder setGoogleClientRequestInitializer( com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) { return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer); } } }
googleapis/google-cloud-java
35,283
java-biglake/proto-google-cloud-biglake-v1/src/main/java/com/google/cloud/biglake/v1/ListIcebergNamespacesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/biglake/v1/iceberg_rest_catalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.biglake.v1; /** * * * <pre> * The response message for the `ListIcebergNamespaces` API. * </pre> * * Protobuf type {@code google.cloud.biglake.v1.ListIcebergNamespacesResponse} */ public final class ListIcebergNamespacesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.biglake.v1.ListIcebergNamespacesResponse) ListIcebergNamespacesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListIcebergNamespacesResponse.newBuilder() to construct. 
private ListIcebergNamespacesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListIcebergNamespacesResponse() { namespaces_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListIcebergNamespacesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.biglake.v1.IcebergRestCatalogProto .internal_static_google_cloud_biglake_v1_ListIcebergNamespacesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.biglake.v1.IcebergRestCatalogProto .internal_static_google_cloud_biglake_v1_ListIcebergNamespacesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.class, com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.Builder.class); } public static final int NAMESPACES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.protobuf.ListValue> namespaces_; /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ @java.lang.Override public java.util.List<com.google.protobuf.ListValue> getNamespacesList() { return namespaces_; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.protobuf.ListValueOrBuilder> getNamespacesOrBuilderList() { return namespaces_; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ @java.lang.Override public int getNamespacesCount() { return namespaces_.size(); } /** * * * <pre> * The list of namespaces. 
* </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ @java.lang.Override public com.google.protobuf.ListValue getNamespaces(int index) { return namespaces_.get(index); } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ @java.lang.Override public com.google.protobuf.ListValueOrBuilder getNamespacesOrBuilder(int index) { return namespaces_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < namespaces_.size(); i++) { output.writeMessage(1, namespaces_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < namespaces_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, namespaces_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.biglake.v1.ListIcebergNamespacesResponse)) { return super.equals(obj); } com.google.cloud.biglake.v1.ListIcebergNamespacesResponse other = (com.google.cloud.biglake.v1.ListIcebergNamespacesResponse) obj; if (!getNamespacesList().equals(other.getNamespacesList())) return false; if 
(!getNextPageToken().equals(other.getNextPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getNamespacesCount() > 0) { hash = (37 * hash) + NAMESPACES_FIELD_NUMBER; hash = (53 * hash) + getNamespacesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.biglake.v1.ListIcebergNamespacesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for the `ListIcebergNamespaces` API. * </pre> * * Protobuf type {@code google.cloud.biglake.v1.ListIcebergNamespacesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.biglake.v1.ListIcebergNamespacesResponse) com.google.cloud.biglake.v1.ListIcebergNamespacesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.biglake.v1.IcebergRestCatalogProto .internal_static_google_cloud_biglake_v1_ListIcebergNamespacesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.biglake.v1.IcebergRestCatalogProto .internal_static_google_cloud_biglake_v1_ListIcebergNamespacesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.class, com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.Builder.class); } // Construct using com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (namespacesBuilder_ == null) { namespaces_ = java.util.Collections.emptyList(); } else { namespaces_ = null; namespacesBuilder_.clear(); } bitField0_ = 
(bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.biglake.v1.IcebergRestCatalogProto .internal_static_google_cloud_biglake_v1_ListIcebergNamespacesResponse_descriptor; } @java.lang.Override public com.google.cloud.biglake.v1.ListIcebergNamespacesResponse getDefaultInstanceForType() { return com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.biglake.v1.ListIcebergNamespacesResponse build() { com.google.cloud.biglake.v1.ListIcebergNamespacesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.biglake.v1.ListIcebergNamespacesResponse buildPartial() { com.google.cloud.biglake.v1.ListIcebergNamespacesResponse result = new com.google.cloud.biglake.v1.ListIcebergNamespacesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.biglake.v1.ListIcebergNamespacesResponse result) { if (namespacesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { namespaces_ = java.util.Collections.unmodifiableList(namespaces_); bitField0_ = (bitField0_ & ~0x00000001); } result.namespaces_ = namespaces_; } else { result.namespaces_ = namespacesBuilder_.build(); } } private void buildPartial0(com.google.cloud.biglake.v1.ListIcebergNamespacesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override 
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.biglake.v1.ListIcebergNamespacesResponse) { return mergeFrom((com.google.cloud.biglake.v1.ListIcebergNamespacesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.biglake.v1.ListIcebergNamespacesResponse other) { if (other == com.google.cloud.biglake.v1.ListIcebergNamespacesResponse.getDefaultInstance()) return this; if (namespacesBuilder_ == null) { if (!other.namespaces_.isEmpty()) { if (namespaces_.isEmpty()) { namespaces_ = other.namespaces_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureNamespacesIsMutable(); namespaces_.addAll(other.namespaces_); } onChanged(); } } else { if (!other.namespaces_.isEmpty()) { if (namespacesBuilder_.isEmpty()) { namespacesBuilder_.dispose(); namespacesBuilder_ = null; namespaces_ = other.namespaces_; bitField0_ = (bitField0_ & ~0x00000001); namespacesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getNamespacesFieldBuilder() : null; } else { namespacesBuilder_.addAllMessages(other.namespaces_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.protobuf.ListValue m = input.readMessage(com.google.protobuf.ListValue.parser(), extensionRegistry); if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); namespaces_.add(m); } else { namespacesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.protobuf.ListValue> namespaces_ = java.util.Collections.emptyList(); private void ensureNamespacesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { namespaces_ = new java.util.ArrayList<com.google.protobuf.ListValue>(namespaces_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.protobuf.ListValue, com.google.protobuf.ListValue.Builder, com.google.protobuf.ListValueOrBuilder> namespacesBuilder_; /** * * * <pre> * The list of namespaces. 
* </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public java.util.List<com.google.protobuf.ListValue> getNamespacesList() { if (namespacesBuilder_ == null) { return java.util.Collections.unmodifiableList(namespaces_); } else { return namespacesBuilder_.getMessageList(); } } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public int getNamespacesCount() { if (namespacesBuilder_ == null) { return namespaces_.size(); } else { return namespacesBuilder_.getCount(); } } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public com.google.protobuf.ListValue getNamespaces(int index) { if (namespacesBuilder_ == null) { return namespaces_.get(index); } else { return namespacesBuilder_.getMessage(index); } } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder setNamespaces(int index, com.google.protobuf.ListValue value) { if (namespacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespacesIsMutable(); namespaces_.set(index, value); onChanged(); } else { namespacesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder setNamespaces(int index, com.google.protobuf.ListValue.Builder builderForValue) { if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); namespaces_.set(index, builderForValue.build()); onChanged(); } else { namespacesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of namespaces. 
* </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder addNamespaces(com.google.protobuf.ListValue value) { if (namespacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespacesIsMutable(); namespaces_.add(value); onChanged(); } else { namespacesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder addNamespaces(int index, com.google.protobuf.ListValue value) { if (namespacesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureNamespacesIsMutable(); namespaces_.add(index, value); onChanged(); } else { namespacesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder addNamespaces(com.google.protobuf.ListValue.Builder builderForValue) { if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); namespaces_.add(builderForValue.build()); onChanged(); } else { namespacesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder addNamespaces(int index, com.google.protobuf.ListValue.Builder builderForValue) { if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); namespaces_.add(index, builderForValue.build()); onChanged(); } else { namespacesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder addAllNamespaces( java.lang.Iterable<? 
extends com.google.protobuf.ListValue> values) { if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, namespaces_); onChanged(); } else { namespacesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder clearNamespaces() { if (namespacesBuilder_ == null) { namespaces_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { namespacesBuilder_.clear(); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public Builder removeNamespaces(int index) { if (namespacesBuilder_ == null) { ensureNamespacesIsMutable(); namespaces_.remove(index); onChanged(); } else { namespacesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public com.google.protobuf.ListValue.Builder getNamespacesBuilder(int index) { return getNamespacesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public com.google.protobuf.ListValueOrBuilder getNamespacesOrBuilder(int index) { if (namespacesBuilder_ == null) { return namespaces_.get(index); } else { return namespacesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public java.util.List<? extends com.google.protobuf.ListValueOrBuilder> getNamespacesOrBuilderList() { if (namespacesBuilder_ != null) { return namespacesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(namespaces_); } } /** * * * <pre> * The list of namespaces. 
* </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public com.google.protobuf.ListValue.Builder addNamespacesBuilder() { return getNamespacesFieldBuilder() .addBuilder(com.google.protobuf.ListValue.getDefaultInstance()); } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public com.google.protobuf.ListValue.Builder addNamespacesBuilder(int index) { return getNamespacesFieldBuilder() .addBuilder(index, com.google.protobuf.ListValue.getDefaultInstance()); } /** * * * <pre> * The list of namespaces. * </pre> * * <code>repeated .google.protobuf.ListValue namespaces = 1;</code> */ public java.util.List<com.google.protobuf.ListValue.Builder> getNamespacesBuilderList() { return getNamespacesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.protobuf.ListValue, com.google.protobuf.ListValue.Builder, com.google.protobuf.ListValueOrBuilder> getNamespacesFieldBuilder() { if (namespacesBuilder_ == null) { namespacesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.protobuf.ListValue, com.google.protobuf.ListValue.Builder, com.google.protobuf.ListValueOrBuilder>( namespaces_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); namespaces_ = null; } return namespacesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The next page token for pagination. 
* </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The next page token for pagination. * </pre> * * <code>string next_page_token = 2 [json_name = "next-page-token"];</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.biglake.v1.ListIcebergNamespacesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.biglake.v1.ListIcebergNamespacesResponse) private static final com.google.cloud.biglake.v1.ListIcebergNamespacesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.biglake.v1.ListIcebergNamespacesResponse(); } public static com.google.cloud.biglake.v1.ListIcebergNamespacesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListIcebergNamespacesResponse> PARSER = new com.google.protobuf.AbstractParser<ListIcebergNamespacesResponse>() { @java.lang.Override public ListIcebergNamespacesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListIcebergNamespacesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListIcebergNamespacesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.biglake.v1.ListIcebergNamespacesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,387
java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/UpdateServiceLevelObjectiveRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/monitoring/v3/service_service.proto // Protobuf Java Version: 3.25.8 package com.google.monitoring.v3; /** * * * <pre> * The `UpdateServiceLevelObjective` request. * </pre> * * Protobuf type {@code google.monitoring.v3.UpdateServiceLevelObjectiveRequest} */ public final class UpdateServiceLevelObjectiveRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.monitoring.v3.UpdateServiceLevelObjectiveRequest) UpdateServiceLevelObjectiveRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateServiceLevelObjectiveRequest.newBuilder() to construct. 
private UpdateServiceLevelObjectiveRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateServiceLevelObjectiveRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateServiceLevelObjectiveRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.ServiceMonitoringServiceProto .internal_static_google_monitoring_v3_UpdateServiceLevelObjectiveRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.ServiceMonitoringServiceProto .internal_static_google_monitoring_v3_UpdateServiceLevelObjectiveRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.class, com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.Builder.class); } private int bitField0_; public static final int SERVICE_LEVEL_OBJECTIVE_FIELD_NUMBER = 1; private com.google.monitoring.v3.ServiceLevelObjective serviceLevelObjective_; /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the serviceLevelObjective field is set. */ @java.lang.Override public boolean hasServiceLevelObjective() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The serviceLevelObjective. 
*/ @java.lang.Override public com.google.monitoring.v3.ServiceLevelObjective getServiceLevelObjective() { return serviceLevelObjective_ == null ? com.google.monitoring.v3.ServiceLevelObjective.getDefaultInstance() : serviceLevelObjective_; } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.monitoring.v3.ServiceLevelObjectiveOrBuilder getServiceLevelObjectiveOrBuilder() { return serviceLevelObjective_ == null ? com.google.monitoring.v3.ServiceLevelObjective.getDefaultInstance() : serviceLevelObjective_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getServiceLevelObjective()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getServiceLevelObjective()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest)) { return super.equals(obj); } com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest other = (com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest) obj; if (hasServiceLevelObjective() != other.hasServiceLevelObjective()) return false; if (hasServiceLevelObjective()) { if (!getServiceLevelObjective().equals(other.getServiceLevelObjective())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { 
return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasServiceLevelObjective()) { hash = (37 * hash) + SERVICE_LEVEL_OBJECTIVE_FIELD_NUMBER; hash = (53 * hash) + getServiceLevelObjective().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest 
parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE 
? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The `UpdateServiceLevelObjective` request. * </pre> * * Protobuf type {@code google.monitoring.v3.UpdateServiceLevelObjectiveRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.monitoring.v3.UpdateServiceLevelObjectiveRequest) com.google.monitoring.v3.UpdateServiceLevelObjectiveRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.monitoring.v3.ServiceMonitoringServiceProto .internal_static_google_monitoring_v3_UpdateServiceLevelObjectiveRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.monitoring.v3.ServiceMonitoringServiceProto .internal_static_google_monitoring_v3_UpdateServiceLevelObjectiveRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.class, com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.Builder.class); } // Construct using com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getServiceLevelObjectiveFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; serviceLevelObjective_ = null; if (serviceLevelObjectiveBuilder_ != null) { 
serviceLevelObjectiveBuilder_.dispose(); serviceLevelObjectiveBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.monitoring.v3.ServiceMonitoringServiceProto .internal_static_google_monitoring_v3_UpdateServiceLevelObjectiveRequest_descriptor; } @java.lang.Override public com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest getDefaultInstanceForType() { return com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.getDefaultInstance(); } @java.lang.Override public com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest build() { com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest buildPartial() { com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest result = new com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.serviceLevelObjective_ = serviceLevelObjectiveBuilder_ == null ? serviceLevelObjective_ : serviceLevelObjectiveBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest) { return mergeFrom((com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest other) { if (other == com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest.getDefaultInstance()) return this; if (other.hasServiceLevelObjective()) { mergeServiceLevelObjective(other.getServiceLevelObjective()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getServiceLevelObjectiveFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.monitoring.v3.ServiceLevelObjective serviceLevelObjective_; private com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.ServiceLevelObjective, com.google.monitoring.v3.ServiceLevelObjective.Builder, com.google.monitoring.v3.ServiceLevelObjectiveOrBuilder> serviceLevelObjectiveBuilder_; /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the serviceLevelObjective field is set. */ public boolean hasServiceLevelObjective() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The serviceLevelObjective. 
*/ public com.google.monitoring.v3.ServiceLevelObjective getServiceLevelObjective() { if (serviceLevelObjectiveBuilder_ == null) { return serviceLevelObjective_ == null ? com.google.monitoring.v3.ServiceLevelObjective.getDefaultInstance() : serviceLevelObjective_; } else { return serviceLevelObjectiveBuilder_.getMessage(); } } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setServiceLevelObjective(com.google.monitoring.v3.ServiceLevelObjective value) { if (serviceLevelObjectiveBuilder_ == null) { if (value == null) { throw new NullPointerException(); } serviceLevelObjective_ = value; } else { serviceLevelObjectiveBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setServiceLevelObjective( com.google.monitoring.v3.ServiceLevelObjective.Builder builderForValue) { if (serviceLevelObjectiveBuilder_ == null) { serviceLevelObjective_ = builderForValue.build(); } else { serviceLevelObjectiveBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. 
* </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeServiceLevelObjective( com.google.monitoring.v3.ServiceLevelObjective value) { if (serviceLevelObjectiveBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && serviceLevelObjective_ != null && serviceLevelObjective_ != com.google.monitoring.v3.ServiceLevelObjective.getDefaultInstance()) { getServiceLevelObjectiveBuilder().mergeFrom(value); } else { serviceLevelObjective_ = value; } } else { serviceLevelObjectiveBuilder_.mergeFrom(value); } if (serviceLevelObjective_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearServiceLevelObjective() { bitField0_ = (bitField0_ & ~0x00000001); serviceLevelObjective_ = null; if (serviceLevelObjectiveBuilder_ != null) { serviceLevelObjectiveBuilder_.dispose(); serviceLevelObjectiveBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.monitoring.v3.ServiceLevelObjective.Builder getServiceLevelObjectiveBuilder() { bitField0_ |= 0x00000001; onChanged(); return getServiceLevelObjectiveFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. 
* </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.monitoring.v3.ServiceLevelObjectiveOrBuilder getServiceLevelObjectiveOrBuilder() { if (serviceLevelObjectiveBuilder_ != null) { return serviceLevelObjectiveBuilder_.getMessageOrBuilder(); } else { return serviceLevelObjective_ == null ? com.google.monitoring.v3.ServiceLevelObjective.getDefaultInstance() : serviceLevelObjective_; } } /** * * * <pre> * Required. The `ServiceLevelObjective` to draw updates from. * The given `name` specifies the resource to update. * </pre> * * <code> * .google.monitoring.v3.ServiceLevelObjective service_level_objective = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.ServiceLevelObjective, com.google.monitoring.v3.ServiceLevelObjective.Builder, com.google.monitoring.v3.ServiceLevelObjectiveOrBuilder> getServiceLevelObjectiveFieldBuilder() { if (serviceLevelObjectiveBuilder_ == null) { serviceLevelObjectiveBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.monitoring.v3.ServiceLevelObjective, com.google.monitoring.v3.ServiceLevelObjective.Builder, com.google.monitoring.v3.ServiceLevelObjectiveOrBuilder>( getServiceLevelObjective(), getParentForChildren(), isClean()); serviceLevelObjective_ = null; } return serviceLevelObjectiveBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A set of field paths defining which fields to use for the update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * A set of field paths defining which fields to use for the update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * A set of field paths defining which fields to use for the update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.monitoring.v3.UpdateServiceLevelObjectiveRequest) } // @@protoc_insertion_point(class_scope:google.monitoring.v3.UpdateServiceLevelObjectiveRequest) private static final com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest(); } public static com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateServiceLevelObjectiveRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateServiceLevelObjectiveRequest>() { @java.lang.Override public UpdateServiceLevelObjectiveRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { 
throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateServiceLevelObjectiveRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateServiceLevelObjectiveRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.monitoring.v3.UpdateServiceLevelObjectiveRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/zookeeper
35,210
zookeeper-server/src/test/java/org/apache/zookeeper/test/ClientTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.test; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNotSame; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.Code; import org.apache.zookeeper.KeeperException.InvalidACLException; import org.apache.zookeeper.TestableZooKeeper; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher.Event.EventType; import org.apache.zookeeper.Watcher.Event.KeeperState; import org.apache.zookeeper.ZooDefs.Ids; import org.apache.zookeeper.ZooDefs.Perms; import 
org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.proto.ExistsRequest; import org.apache.zookeeper.proto.ExistsResponse; import org.apache.zookeeper.proto.ReplyHeader; import org.apache.zookeeper.proto.RequestHeader; import org.apache.zookeeper.server.PrepRequestProcessor; import org.apache.zookeeper.server.util.OSMXBean; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class ClientTest extends ClientBase { protected static final Logger LOG = LoggerFactory.getLogger(ClientTest.class); private boolean skipACL = System.getProperty("zookeeper.skipACL", "no").equals("yes"); /** Verify that pings are sent, keeping the "idle" client alive */ @Test public void testPing() throws Exception { ZooKeeper zkIdle = null; ZooKeeper zkWatchCreator = null; try { CountdownWatcher watcher = new CountdownWatcher(); zkIdle = createClient(watcher, hostPort, 10000); zkWatchCreator = createClient(); for (int i = 0; i < 10; i++) { zkWatchCreator.create("/" + i, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } for (int i = 0; i < 10; i++) { zkIdle.exists("/" + i, true); } for (int i = 0; i < 10; i++) { Thread.sleep(1000); zkWatchCreator.delete("/" + i, -1); } // The bug will manifest itself here because zkIdle will expire zkIdle.exists("/0", false); } finally { if (zkIdle != null) { zkIdle.close(); } if (zkWatchCreator != null) { zkWatchCreator.close(); } } } @Test public void testClientwithoutWatcherObj() throws IOException, InterruptedException, KeeperException { performClientTest(false); } @Test public void testClientWithWatcherObj() throws IOException, InterruptedException, KeeperException { performClientTest(true); } /** Exercise the testable functions, verify tostring, etc... 
*/ @Test public void testTestability() throws Exception { TestableZooKeeper zk = createClient(); try { LOG.info("{}", zk.testableLocalSocketAddress()); LOG.info("{}", zk.testableRemoteSocketAddress()); LOG.info("{}", zk.toString()); } finally { zk.close(CONNECTION_TIMEOUT); LOG.info("{}", zk.testableLocalSocketAddress()); LOG.info("{}", zk.testableRemoteSocketAddress()); LOG.info("{}", zk.toString()); } } @Test public void testACLs() throws Exception { ZooKeeper zk = null; try { zk = createClient(); try { zk.create("/acltest", new byte[0], Ids.CREATOR_ALL_ACL, CreateMode.PERSISTENT); fail("Should have received an invalid acl error"); } catch (InvalidACLException e) { LOG.info("Test successful, invalid acl received : {}", e.getMessage()); } try { ArrayList<ACL> testACL = new ArrayList<>(); testACL.add(new ACL(Perms.ALL | Perms.ADMIN, Ids.AUTH_IDS)); testACL.add(new ACL(Perms.ALL | Perms.ADMIN, new Id("ip", "127.0.0.1/8"))); zk.create("/acltest", new byte[0], testACL, CreateMode.PERSISTENT); fail("Should have received an invalid acl error"); } catch (InvalidACLException e) { LOG.info("Test successful, invalid acl received : {}", e.getMessage()); } try { ArrayList<ACL> testACL = new ArrayList<>(); testACL.add(new ACL(Perms.ALL | Perms.ADMIN, new Id())); zk.create("/nullidtest", new byte[0], testACL, CreateMode.PERSISTENT); fail("Should have received an invalid acl error"); } catch (InvalidACLException e) { LOG.info("Test successful, invalid acl received : {}", e.getMessage()); } zk.addAuthInfo("digest", "ben:passwd".getBytes()); ArrayList<ACL> testACL = new ArrayList<>(); testACL.add(new ACL(Perms.ALL, new Id("auth", ""))); testACL.add(new ACL(Perms.WRITE, new Id("ip", "127.0.0.1"))); zk.create("/acltest", new byte[0], testACL, CreateMode.PERSISTENT); zk.close(); zk = createClient(); zk.addAuthInfo("digest", "ben:passwd2".getBytes()); if (skipACL) { try { zk.getData("/acltest", false, null); } catch (KeeperException e) { fail("Badauth reads should succeed with 
skipACL."); } } else { try { zk.getData("/acltest", false, null); fail("Should have received a permission error"); } catch (KeeperException e) { assertEquals(Code.NOAUTH, e.code()); } } zk.addAuthInfo("digest", "ben:passwd".getBytes()); zk.getData("/acltest", false, null); zk.setACL("/acltest", Ids.OPEN_ACL_UNSAFE, -1); zk.close(); zk = createClient(); zk.getData("/acltest", false, null); List<ACL> acls = zk.getACL("/acltest", new Stat()); assertEquals(1, acls.size()); assertEquals(Ids.OPEN_ACL_UNSAFE, acls); // The stat parameter should be optional. acls = zk.getACL("/acltest", null); assertEquals(1, acls.size()); assertEquals(Ids.OPEN_ACL_UNSAFE, acls); zk.close(); } finally { if (zk != null) { zk.close(); } } } @Test public void testNullAuthId() throws Exception { ZooKeeper zk = null; try { zk = createClient(); zk.addAuthInfo("digest", "ben:passwd".getBytes()); ArrayList<ACL> testACL = new ArrayList<>(); testACL.add(new ACL(Perms.ALL, new Id("auth", null))); zk.create("/acltest", new byte[0], testACL, CreateMode.PERSISTENT); zk.close(); zk = createClient(); zk.addAuthInfo("digest", "ben:passwd2".getBytes()); if (skipACL) { try { zk.getData("/acltest", false, null); } catch (KeeperException e) { fail("Badauth reads should succeed with skipACL."); } } else { try { zk.getData("/acltest", false, null); fail("Should have received a permission error"); } catch (KeeperException e) { assertEquals(Code.NOAUTH, e.code()); } } zk.addAuthInfo("digest", "ben:passwd".getBytes()); zk.getData("/acltest", false, null); zk.setACL("/acltest", Ids.OPEN_ACL_UNSAFE, -1); zk.close(); zk = createClient(); zk.getData("/acltest", false, null); List<ACL> acls = zk.getACL("/acltest", new Stat()); assertEquals(1, acls.size()); assertEquals(Ids.OPEN_ACL_UNSAFE, acls); } finally { if (zk != null) { zk.close(); } } } private class MyWatcher extends CountdownWatcher { LinkedBlockingQueue<WatchedEvent> events = new LinkedBlockingQueue<>(); public void process(WatchedEvent event) { 
super.process(event); if (event.getType() != EventType.None) { try { events.put(event); } catch (InterruptedException e) { LOG.warn("ignoring interrupt during event.put"); } } } } /** * Register multiple watchers and verify that they all get notified and * in the right order. */ @Test public void testMultipleWatcherObjs() throws IOException, InterruptedException, KeeperException { ZooKeeper zk = createClient(new CountdownWatcher(), hostPort); try { MyWatcher[] watchers = new MyWatcher[100]; MyWatcher[] watchers2 = new MyWatcher[watchers.length]; for (int i = 0; i < watchers.length; i++) { watchers[i] = new MyWatcher(); watchers2[i] = new MyWatcher(); zk.create("/foo-" + i, ("foodata" + i).getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } Stat stat = new Stat(); // // test get/exists with single set of watchers // get all, then exists all // for (int i = 0; i < watchers.length; i++) { assertNotNull(zk.getData("/foo-" + i, watchers[i], stat)); } for (int i = 0; i < watchers.length; i++) { assertNotNull(zk.exists("/foo-" + i, watchers[i])); } // trigger the watches for (int i = 0; i < watchers.length; i++) { zk.setData("/foo-" + i, ("foodata2-" + i).getBytes(), -1); zk.setData("/foo-" + i, ("foodata3-" + i).getBytes(), -1); } for (int i = 0; i < watchers.length; i++) { WatchedEvent event = watchers[i].events.poll(10, TimeUnit.SECONDS); assertEquals("/foo-" + i, event.getPath()); assertEquals(EventType.NodeDataChanged, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); // small chance that an unexpected message was delivered // after this check, but we would catch that next time // we check events assertEquals(0, watchers[i].events.size()); } // // test get/exists with single set of watchers // get/exists together // for (int i = 0; i < watchers.length; i++) { assertNotNull(zk.getData("/foo-" + i, watchers[i], stat)); assertNotNull(zk.exists("/foo-" + i, watchers[i])); } // trigger the watches for (int i = 0; i < watchers.length; i++) 
{ zk.setData("/foo-" + i, ("foodata4-" + i).getBytes(), -1); zk.setData("/foo-" + i, ("foodata5-" + i).getBytes(), -1); } for (int i = 0; i < watchers.length; i++) { WatchedEvent event = watchers[i].events.poll(10, TimeUnit.SECONDS); assertEquals("/foo-" + i, event.getPath()); assertEquals(EventType.NodeDataChanged, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); // small chance that an unexpected message was delivered // after this check, but we would catch that next time // we check events assertEquals(0, watchers[i].events.size()); } // // test get/exists with two sets of watchers // for (int i = 0; i < watchers.length; i++) { assertNotNull(zk.getData("/foo-" + i, watchers[i], stat)); assertNotNull(zk.exists("/foo-" + i, watchers2[i])); } // trigger the watches for (int i = 0; i < watchers.length; i++) { zk.setData("/foo-" + i, ("foodata6-" + i).getBytes(), -1); zk.setData("/foo-" + i, ("foodata7-" + i).getBytes(), -1); } for (int i = 0; i < watchers.length; i++) { WatchedEvent event = watchers[i].events.poll(10, TimeUnit.SECONDS); assertEquals("/foo-" + i, event.getPath()); assertEquals(EventType.NodeDataChanged, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); // small chance that an unexpected message was delivered // after this check, but we would catch that next time // we check events assertEquals(0, watchers[i].events.size()); // watchers2 WatchedEvent event2 = watchers2[i].events.poll(10, TimeUnit.SECONDS); assertEquals("/foo-" + i, event2.getPath()); assertEquals(EventType.NodeDataChanged, event2.getType()); assertEquals(KeeperState.SyncConnected, event2.getState()); // small chance that an unexpected message was delivered // after this check, but we would catch that next time // we check events assertEquals(0, watchers2[i].events.size()); } } finally { if (zk != null) { zk.close(); } } } private void performClientTest(boolean withWatcherObj) throws IOException, InterruptedException, 
KeeperException { ZooKeeper zk = null; try { MyWatcher watcher = new MyWatcher(); zk = createClient(watcher, hostPort); LOG.info("Before create /benwashere"); zk.create("/benwashere", "".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); LOG.info("After create /benwashere"); try { zk.setData("/benwashere", "hi".getBytes(), 57); fail("Should have gotten BadVersion exception"); } catch (KeeperException.BadVersionException e) { // expected that } catch (KeeperException e) { fail("Should have gotten BadVersion exception"); } LOG.info("Before delete /benwashere"); zk.delete("/benwashere", 0); LOG.info("After delete /benwashere"); zk.close(); Thread.sleep(2000); zk = createClient(watcher, hostPort); LOG.info("Before delete /"); try { zk.delete("/", -1); fail("deleted root!"); } catch (KeeperException.BadArgumentsException e) { // good, expected that } Stat stat = new Stat(); // Test basic create, ls, and getData zk.create("/pat", "Pat was here".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); LOG.info("Before create /ben"); zk.create("/pat/ben", "Ben was here".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); LOG.info("Before getChildren /pat"); List<String> children = zk.getChildren("/pat", false); assertEquals(1, children.size()); assertEquals("ben", children.get(0)); List<String> children2 = zk.getChildren("/pat", false, null); assertEquals(children, children2); String value = new String(zk.getData("/pat/ben", false, stat)); assertEquals("Ben was here", value); // Test stat and watch of non existent node try { if (withWatcherObj) { assertEquals(null, zk.exists("/frog", watcher)); } else { assertEquals(null, zk.exists("/frog", true)); } LOG.info("Comment: asseting passed for frog setting /"); } catch (KeeperException.NoNodeException e) { // OK, expected that } zk.create("/frog", "hi".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); // the first poll is just a session delivery LOG.info("Comment: checking for events length {}", 
watcher.events.size()); WatchedEvent event = watcher.events.poll(10, TimeUnit.SECONDS); assertEquals("/frog", event.getPath()); assertEquals(EventType.NodeCreated, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); // Test child watch and create with sequence zk.getChildren("/pat/ben", true); for (int i = 0; i < 10; i++) { zk.create("/pat/ben/" + i + "-", Integer.toString(i).getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); } children = zk.getChildren("/pat/ben", false); Collections.sort(children); assertEquals(10, children.size()); for (int i = 0; i < 10; i++) { final String name = children.get(i); assertTrue(name.startsWith(i + "-"), "starts with -"); byte[] b; if (withWatcherObj) { b = zk.getData("/pat/ben/" + name, watcher, stat); } else { b = zk.getData("/pat/ben/" + name, true, stat); } assertEquals(Integer.toString(i), new String(b)); zk.setData("/pat/ben/" + name, "new".getBytes(), stat.getVersion()); if (withWatcherObj) { stat = zk.exists("/pat/ben/" + name, watcher); } else { stat = zk.exists("/pat/ben/" + name, true); } zk.delete("/pat/ben/" + name, stat.getVersion()); } event = watcher.events.poll(10, TimeUnit.SECONDS); assertEquals("/pat/ben", event.getPath()); assertEquals(EventType.NodeChildrenChanged, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); for (int i = 0; i < 10; i++) { event = watcher.events.poll(10, TimeUnit.SECONDS); final String name = children.get(i); assertEquals("/pat/ben/" + name, event.getPath()); assertEquals(EventType.NodeDataChanged, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); event = watcher.events.poll(10, TimeUnit.SECONDS); assertEquals("/pat/ben/" + name, event.getPath()); assertEquals(EventType.NodeDeleted, event.getType()); assertEquals(KeeperState.SyncConnected, event.getState()); } zk.create("/good\u0040path", "".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/duplicate", "".getBytes(), 
Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); try { zk.create("/duplicate", "".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); fail("duplicate create allowed"); } catch (KeeperException.NodeExistsException e) { // OK, expected that } } finally { if (zk != null) { zk.close(); } } } // Test that sequential filenames are being created correctly, // with 0-padding in the filename @Test public void testSequentialNodeNames() throws IOException, InterruptedException, KeeperException { String path = "/SEQUENCE"; String file = "TEST"; String filepath = path + "/" + file; ZooKeeper zk = null; try { zk = createClient(); zk.create(path, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create(filepath, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); List<String> children = zk.getChildren(path, false); assertEquals(1, children.size()); assertEquals(file + "0000000000", children.get(0)); zk.create(filepath, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); children = zk.getChildren(path, false); assertEquals(2, children.size()); assertTrue(children.contains(file + "0000000001"), "contains child 1"); zk.create(filepath, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); children = zk.getChildren(path, false); assertEquals(3, children.size()); assertTrue(children.contains(file + "0000000002"), "contains child 2"); // The pattern is holding so far. 
Let's run the counter a bit // to be sure it continues to spit out the correct answer for (int i = children.size(); i < 105; i++) { zk.create(filepath, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); } children = zk.getChildren(path, false); assertTrue(children.contains(file + "0000000104"), "contains child 104"); } finally { if (zk != null) { zk.close(); } } } // Test that data provided when // creating sequential nodes is stored properly @Test public void testSequentialNodeData() throws Exception { ZooKeeper zk = null; String queue_handle = "/queue"; try { zk = createClient(); zk.create(queue_handle, new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create(queue_handle + "/element", "0".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); zk.create(queue_handle + "/element", "1".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); List<String> children = zk.getChildren(queue_handle, true); assertEquals(children.size(), 2); String child1 = children.get(0); String child2 = children.get(1); int compareResult = child1.compareTo(child2); assertNotSame(compareResult, 0); if (compareResult < 0) { } else { String temp = child1; child1 = child2; child2 = temp; } String child1data = new String(zk.getData(queue_handle + "/" + child1, false, null)); String child2data = new String(zk.getData(queue_handle + "/" + child2, false, null)); assertEquals(child1data, "0"); assertEquals(child2data, "1"); } finally { if (zk != null) { zk.close(); } } } @Test public void testLargeNodeData() throws Exception { ZooKeeper zk = null; String queue_handle = "/large"; try { zk = createClient(); zk.create(queue_handle, new byte[500000], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } finally { if (zk != null) { zk.close(); } } } private void verifyCreateFails(String path, ZooKeeper zk) throws Exception { try { zk.create(path, null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } catch (IllegalArgumentException e) { // this is good 
return; } fail("bad path \"" + path + "\" not caught"); } // Test that the path string is validated @Test public void testPathValidation() throws Exception { ZooKeeper zk = createClient(); verifyCreateFails(null, zk); verifyCreateFails("", zk); verifyCreateFails("//", zk); verifyCreateFails("///", zk); verifyCreateFails("////", zk); verifyCreateFails("/.", zk); verifyCreateFails("/..", zk); verifyCreateFails("/./", zk); verifyCreateFails("/../", zk); verifyCreateFails("/foo/./", zk); verifyCreateFails("/foo/../", zk); verifyCreateFails("/foo/.", zk); verifyCreateFails("/foo/..", zk); verifyCreateFails("/./.", zk); verifyCreateFails("/../..", zk); verifyCreateFails("/\u0001foo", zk); verifyCreateFails("/foo/bar/", zk); verifyCreateFails("/foo//bar", zk); verifyCreateFails("/foo/bar//", zk); verifyCreateFails("foo", zk); verifyCreateFails("a", zk); zk.create("/createseqpar", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); // next two steps - related to sequential processing // 1) verify that empty child name fails if not sequential try { zk.create("/createseqpar/", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); assertTrue(false); } catch (IllegalArgumentException be) { // catch this. } // 2) verify that empty child name success if sequential zk.create("/createseqpar/", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); zk.create("/createseqpar/.", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); zk.create("/createseqpar/..", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); try { zk.create("/createseqpar//", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); assertTrue(false); } catch (IllegalArgumentException be) { // catch this. } try { zk.create("/createseqpar/./", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); assertTrue(false); } catch (IllegalArgumentException be) { // catch this. 
} try { zk.create("/createseqpar/../", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT_SEQUENTIAL); assertTrue(false); } catch (IllegalArgumentException be) { // catch this. } //check for the code path that throws at server PrepRequestProcessor.setFailCreate(true); try { zk.create("/m", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); assertTrue(false); } catch (KeeperException.BadArgumentsException be) { // catch this. } PrepRequestProcessor.setFailCreate(false); zk.create("/.foo", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/.f.", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/..f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/..f..", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f.c", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f\u0040f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/.f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/f.", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/..f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/f..", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/.f/f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/f/f./f", null, Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); } @Test public void testDeleteWithChildren() throws Exception { ZooKeeper zk = createClient(); zk.create("/parent", new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); zk.create("/parent/child", new byte[0], Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); try { zk.delete("/parent", -1); fail("Should have received a not equals message"); } catch (KeeperException e) { assertEquals(KeeperException.Code.NOTEMPTY, e.code()); } zk.delete("/parent/child", -1); zk.delete("/parent", -1); zk.close(); } private class VerifyClientCleanup extends Thread { int count; int current = 0; VerifyClientCleanup(String name, 
int count) { super(name); this.count = count; } public void run() { try { for (; current < count; current++) { TestableZooKeeper zk = createClient(); // we've asked to close, wait for it to finish closing // all the sub-threads otw the selector may not be // closed when we check (false positive on test failure zk.close(CONNECTION_TIMEOUT); } } catch (Throwable t) { LOG.error("test failed", t); } } } /** * Verify that the client is cleaning up properly. Open/close a large * number of sessions. Essentially looking to see if sockets/selectors * are being cleaned up properly during close. * * @throws Throwable */ @Test public void testClientCleanup() throws Throwable { OSMXBean osMbean = new OSMXBean(); if (!osMbean.getUnix()) { LOG.warn("skipping testClientCleanup, only available on Unix"); return; } final int threadCount = 3; final int clientCount = 10; /* Log the number of fds used before and after a test is run. Verifies * we are freeing resources correctly. Unfortunately this only works * on unix systems (the only place sun has implemented as part of the * mgmt bean api). */ long initialFdCount = osMbean.getOpenFileDescriptorCount(); VerifyClientCleanup[] threads = new VerifyClientCleanup[threadCount]; for (int i = 0; i < threads.length; i++) { threads[i] = new VerifyClientCleanup("VCC" + i, clientCount); threads[i].start(); } for (int i = 0; i < threads.length; i++) { threads[i].join(CONNECTION_TIMEOUT); assertTrue(threads[i].current == threads[i].count); } // if this fails it means we are not cleaning up after the closed // sessions. long currentCount = osMbean.getOpenFileDescriptorCount(); final String logmsg = "open fds after test ({}) are not significantly higher than before ({})"; if (currentCount > initialFdCount + 10) { // consider as error LOG.error(logmsg, currentCount, initialFdCount); } else { LOG.info(logmsg, currentCount, initialFdCount); } } /** * We create a perfectly valid 'exists' request, except that the opcode is wrong. 
* @throws Exception */ @Test public void testNonExistingOpCode() throws Exception { final CountDownLatch clientDisconnected = new CountDownLatch(1); Watcher watcher = new Watcher() { @Override public synchronized void process(WatchedEvent event) { if (event.getState() == KeeperState.Disconnected) { clientDisconnected.countDown(); } } }; TestableZooKeeper zk = new TestableZooKeeper(hostPort, CONNECTION_TIMEOUT, watcher); final String path = "/m1"; RequestHeader h = new RequestHeader(); h.setType(888); // This code does not exists ExistsRequest request = new ExistsRequest(); request.setPath(path); request.setWatch(false); ExistsResponse response = new ExistsResponse(); ReplyHeader r = zk.submitRequest(h, request, response, null); assertEquals(r.getErr(), Code.UNIMPLEMENTED.intValue()); // Sending a nonexisting opcode should cause the server to disconnect assertTrue(clientDisconnected.await(5000, TimeUnit.MILLISECONDS), "failed to disconnect"); zk.close(); } @Test public void testTryWithResources() throws Exception { ZooKeeper zooKeeper; try (ZooKeeper zk = createClient()) { zooKeeper = zk; assertTrue(zooKeeper.getState().isAlive()); } assertFalse(zooKeeper.getState().isAlive()); } @Test public void testCXidRollover() throws Exception { TestableZooKeeper zk = null; try { zk = createClient(); zk.setXid(Integer.MAX_VALUE - 10); zk.create("/testnode", "".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT); for (int i = 0; i < 20; ++i) { final CountDownLatch latch = new CountDownLatch(1); final AtomicInteger rc = new AtomicInteger(0); zk.setData("/testnode", "".getBytes(), -1, (retcode, path, ctx, stat) -> { rc.set(retcode); latch.countDown(); }, null); assertTrue(latch.await(zk.getSessionTimeout(), TimeUnit.MILLISECONDS), "setData should complete within 5s"); assertEquals(Code.OK.intValue(), rc.get(), "setData should have succeeded"); } zk.delete("/testnode", -1); assertTrue(zk.checkXid() > 0, "xid should be positive"); } finally { if (zk != null) { zk.close(); } } } 
}
googleapis/google-cloud-java
34,991
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/EvaluateInstancesRequestOrBuilder.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; public interface EvaluateInstancesRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1beta1.EvaluateInstancesRequest) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * Auto metric instances. * Instances and metric spec for exact match metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ExactMatchInput exact_match_input = 2;</code> * * @return Whether the exactMatchInput field is set. */ boolean hasExactMatchInput(); /** * * * <pre> * Auto metric instances. * Instances and metric spec for exact match metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ExactMatchInput exact_match_input = 2;</code> * * @return The exactMatchInput. */ com.google.cloud.aiplatform.v1beta1.ExactMatchInput getExactMatchInput(); /** * * * <pre> * Auto metric instances. * Instances and metric spec for exact match metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ExactMatchInput exact_match_input = 2;</code> */ com.google.cloud.aiplatform.v1beta1.ExactMatchInputOrBuilder getExactMatchInputOrBuilder(); /** * * * <pre> * Instances and metric spec for bleu metric. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.BleuInput bleu_input = 3;</code> * * @return Whether the bleuInput field is set. */ boolean hasBleuInput(); /** * * * <pre> * Instances and metric spec for bleu metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.BleuInput bleu_input = 3;</code> * * @return The bleuInput. */ com.google.cloud.aiplatform.v1beta1.BleuInput getBleuInput(); /** * * * <pre> * Instances and metric spec for bleu metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.BleuInput bleu_input = 3;</code> */ com.google.cloud.aiplatform.v1beta1.BleuInputOrBuilder getBleuInputOrBuilder(); /** * * * <pre> * Instances and metric spec for rouge metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.RougeInput rouge_input = 4;</code> * * @return Whether the rougeInput field is set. */ boolean hasRougeInput(); /** * * * <pre> * Instances and metric spec for rouge metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.RougeInput rouge_input = 4;</code> * * @return The rougeInput. */ com.google.cloud.aiplatform.v1beta1.RougeInput getRougeInput(); /** * * * <pre> * Instances and metric spec for rouge metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.RougeInput rouge_input = 4;</code> */ com.google.cloud.aiplatform.v1beta1.RougeInputOrBuilder getRougeInputOrBuilder(); /** * * * <pre> * LLM-based metric instance. * General text generation metrics, applicable to other categories. * Input for fluency metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FluencyInput fluency_input = 5;</code> * * @return Whether the fluencyInput field is set. */ boolean hasFluencyInput(); /** * * * <pre> * LLM-based metric instance. * General text generation metrics, applicable to other categories. * Input for fluency metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FluencyInput fluency_input = 5;</code> * * @return The fluencyInput. 
*/ com.google.cloud.aiplatform.v1beta1.FluencyInput getFluencyInput(); /** * * * <pre> * LLM-based metric instance. * General text generation metrics, applicable to other categories. * Input for fluency metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FluencyInput fluency_input = 5;</code> */ com.google.cloud.aiplatform.v1beta1.FluencyInputOrBuilder getFluencyInputOrBuilder(); /** * * * <pre> * Input for coherence metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CoherenceInput coherence_input = 6;</code> * * @return Whether the coherenceInput field is set. */ boolean hasCoherenceInput(); /** * * * <pre> * Input for coherence metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CoherenceInput coherence_input = 6;</code> * * @return The coherenceInput. */ com.google.cloud.aiplatform.v1beta1.CoherenceInput getCoherenceInput(); /** * * * <pre> * Input for coherence metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CoherenceInput coherence_input = 6;</code> */ com.google.cloud.aiplatform.v1beta1.CoherenceInputOrBuilder getCoherenceInputOrBuilder(); /** * * * <pre> * Input for safety metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.SafetyInput safety_input = 8;</code> * * @return Whether the safetyInput field is set. */ boolean hasSafetyInput(); /** * * * <pre> * Input for safety metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.SafetyInput safety_input = 8;</code> * * @return The safetyInput. */ com.google.cloud.aiplatform.v1beta1.SafetyInput getSafetyInput(); /** * * * <pre> * Input for safety metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.SafetyInput safety_input = 8;</code> */ com.google.cloud.aiplatform.v1beta1.SafetyInputOrBuilder getSafetyInputOrBuilder(); /** * * * <pre> * Input for groundedness metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GroundednessInput groundedness_input = 9;</code> * * @return Whether the groundednessInput field is set. 
*/ boolean hasGroundednessInput(); /** * * * <pre> * Input for groundedness metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GroundednessInput groundedness_input = 9;</code> * * @return The groundednessInput. */ com.google.cloud.aiplatform.v1beta1.GroundednessInput getGroundednessInput(); /** * * * <pre> * Input for groundedness metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GroundednessInput groundedness_input = 9;</code> */ com.google.cloud.aiplatform.v1beta1.GroundednessInputOrBuilder getGroundednessInputOrBuilder(); /** * * * <pre> * Input for fulfillment metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FulfillmentInput fulfillment_input = 12;</code> * * @return Whether the fulfillmentInput field is set. */ boolean hasFulfillmentInput(); /** * * * <pre> * Input for fulfillment metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FulfillmentInput fulfillment_input = 12;</code> * * @return The fulfillmentInput. */ com.google.cloud.aiplatform.v1beta1.FulfillmentInput getFulfillmentInput(); /** * * * <pre> * Input for fulfillment metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.FulfillmentInput fulfillment_input = 12;</code> */ com.google.cloud.aiplatform.v1beta1.FulfillmentInputOrBuilder getFulfillmentInputOrBuilder(); /** * * * <pre> * Input for summarization quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationQualityInput summarization_quality_input = 7; * </code> * * @return Whether the summarizationQualityInput field is set. */ boolean hasSummarizationQualityInput(); /** * * * <pre> * Input for summarization quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationQualityInput summarization_quality_input = 7; * </code> * * @return The summarizationQualityInput. */ com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput getSummarizationQualityInput(); /** * * * <pre> * Input for summarization quality metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationQualityInput summarization_quality_input = 7; * </code> */ com.google.cloud.aiplatform.v1beta1.SummarizationQualityInputOrBuilder getSummarizationQualityInputOrBuilder(); /** * * * <pre> * Input for pairwise summarization quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput pairwise_summarization_quality_input = 23; * </code> * * @return Whether the pairwiseSummarizationQualityInput field is set. */ boolean hasPairwiseSummarizationQualityInput(); /** * * * <pre> * Input for pairwise summarization quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput pairwise_summarization_quality_input = 23; * </code> * * @return The pairwiseSummarizationQualityInput. */ com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput getPairwiseSummarizationQualityInput(); /** * * * <pre> * Input for pairwise summarization quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInput pairwise_summarization_quality_input = 23; * </code> */ com.google.cloud.aiplatform.v1beta1.PairwiseSummarizationQualityInputOrBuilder getPairwiseSummarizationQualityInputOrBuilder(); /** * * * <pre> * Input for summarization helpfulness metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationHelpfulnessInput summarization_helpfulness_input = 14; * </code> * * @return Whether the summarizationHelpfulnessInput field is set. */ boolean hasSummarizationHelpfulnessInput(); /** * * * <pre> * Input for summarization helpfulness metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationHelpfulnessInput summarization_helpfulness_input = 14; * </code> * * @return The summarizationHelpfulnessInput. 
*/ com.google.cloud.aiplatform.v1beta1.SummarizationHelpfulnessInput getSummarizationHelpfulnessInput(); /** * * * <pre> * Input for summarization helpfulness metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationHelpfulnessInput summarization_helpfulness_input = 14; * </code> */ com.google.cloud.aiplatform.v1beta1.SummarizationHelpfulnessInputOrBuilder getSummarizationHelpfulnessInputOrBuilder(); /** * * * <pre> * Input for summarization verbosity metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput summarization_verbosity_input = 15; * </code> * * @return Whether the summarizationVerbosityInput field is set. */ boolean hasSummarizationVerbosityInput(); /** * * * <pre> * Input for summarization verbosity metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput summarization_verbosity_input = 15; * </code> * * @return The summarizationVerbosityInput. */ com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput getSummarizationVerbosityInput(); /** * * * <pre> * Input for summarization verbosity metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.SummarizationVerbosityInput summarization_verbosity_input = 15; * </code> */ com.google.cloud.aiplatform.v1beta1.SummarizationVerbosityInputOrBuilder getSummarizationVerbosityInputOrBuilder(); /** * * * <pre> * Input for question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringQualityInput question_answering_quality_input = 10; * </code> * * @return Whether the questionAnsweringQualityInput field is set. */ boolean hasQuestionAnsweringQualityInput(); /** * * * <pre> * Input for question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringQualityInput question_answering_quality_input = 10; * </code> * * @return The questionAnsweringQualityInput. 
*/ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringQualityInput getQuestionAnsweringQualityInput(); /** * * * <pre> * Input for question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringQualityInput question_answering_quality_input = 10; * </code> */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringQualityInputOrBuilder getQuestionAnsweringQualityInputOrBuilder(); /** * * * <pre> * Input for pairwise question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseQuestionAnsweringQualityInput pairwise_question_answering_quality_input = 24; * </code> * * @return Whether the pairwiseQuestionAnsweringQualityInput field is set. */ boolean hasPairwiseQuestionAnsweringQualityInput(); /** * * * <pre> * Input for pairwise question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseQuestionAnsweringQualityInput pairwise_question_answering_quality_input = 24; * </code> * * @return The pairwiseQuestionAnsweringQualityInput. */ com.google.cloud.aiplatform.v1beta1.PairwiseQuestionAnsweringQualityInput getPairwiseQuestionAnsweringQualityInput(); /** * * * <pre> * Input for pairwise question answering quality metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseQuestionAnsweringQualityInput pairwise_question_answering_quality_input = 24; * </code> */ com.google.cloud.aiplatform.v1beta1.PairwiseQuestionAnsweringQualityInputOrBuilder getPairwiseQuestionAnsweringQualityInputOrBuilder(); /** * * * <pre> * Input for question answering relevance metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringRelevanceInput question_answering_relevance_input = 16; * </code> * * @return Whether the questionAnsweringRelevanceInput field is set. */ boolean hasQuestionAnsweringRelevanceInput(); /** * * * <pre> * Input for question answering relevance metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringRelevanceInput question_answering_relevance_input = 16; * </code> * * @return The questionAnsweringRelevanceInput. */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringRelevanceInput getQuestionAnsweringRelevanceInput(); /** * * * <pre> * Input for question answering relevance metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringRelevanceInput question_answering_relevance_input = 16; * </code> */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringRelevanceInputOrBuilder getQuestionAnsweringRelevanceInputOrBuilder(); /** * * * <pre> * Input for question answering helpfulness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput question_answering_helpfulness_input = 17; * </code> * * @return Whether the questionAnsweringHelpfulnessInput field is set. */ boolean hasQuestionAnsweringHelpfulnessInput(); /** * * * <pre> * Input for question answering helpfulness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput question_answering_helpfulness_input = 17; * </code> * * @return The questionAnsweringHelpfulnessInput. */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput getQuestionAnsweringHelpfulnessInput(); /** * * * <pre> * Input for question answering helpfulness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInput question_answering_helpfulness_input = 17; * </code> */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringHelpfulnessInputOrBuilder getQuestionAnsweringHelpfulnessInputOrBuilder(); /** * * * <pre> * Input for question answering correctness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput question_answering_correctness_input = 18; * </code> * * @return Whether the questionAnsweringCorrectnessInput field is set. 
*/ boolean hasQuestionAnsweringCorrectnessInput(); /** * * * <pre> * Input for question answering correctness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput question_answering_correctness_input = 18; * </code> * * @return The questionAnsweringCorrectnessInput. */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput getQuestionAnsweringCorrectnessInput(); /** * * * <pre> * Input for question answering correctness * metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInput question_answering_correctness_input = 18; * </code> */ com.google.cloud.aiplatform.v1beta1.QuestionAnsweringCorrectnessInputOrBuilder getQuestionAnsweringCorrectnessInputOrBuilder(); /** * * * <pre> * Input for pointwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PointwiseMetricInput pointwise_metric_input = 28;</code> * * @return Whether the pointwiseMetricInput field is set. */ boolean hasPointwiseMetricInput(); /** * * * <pre> * Input for pointwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PointwiseMetricInput pointwise_metric_input = 28;</code> * * @return The pointwiseMetricInput. */ com.google.cloud.aiplatform.v1beta1.PointwiseMetricInput getPointwiseMetricInput(); /** * * * <pre> * Input for pointwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PointwiseMetricInput pointwise_metric_input = 28;</code> */ com.google.cloud.aiplatform.v1beta1.PointwiseMetricInputOrBuilder getPointwiseMetricInputOrBuilder(); /** * * * <pre> * Input for pairwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PairwiseMetricInput pairwise_metric_input = 29;</code> * * @return Whether the pairwiseMetricInput field is set. */ boolean hasPairwiseMetricInput(); /** * * * <pre> * Input for pairwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PairwiseMetricInput pairwise_metric_input = 29;</code> * * @return The pairwiseMetricInput. 
*/ com.google.cloud.aiplatform.v1beta1.PairwiseMetricInput getPairwiseMetricInput(); /** * * * <pre> * Input for pairwise metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.PairwiseMetricInput pairwise_metric_input = 29;</code> */ com.google.cloud.aiplatform.v1beta1.PairwiseMetricInputOrBuilder getPairwiseMetricInputOrBuilder(); /** * * * <pre> * Tool call metric instances. * Input for tool call valid metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolCallValidInput tool_call_valid_input = 19;</code> * * @return Whether the toolCallValidInput field is set. */ boolean hasToolCallValidInput(); /** * * * <pre> * Tool call metric instances. * Input for tool call valid metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolCallValidInput tool_call_valid_input = 19;</code> * * @return The toolCallValidInput. */ com.google.cloud.aiplatform.v1beta1.ToolCallValidInput getToolCallValidInput(); /** * * * <pre> * Tool call metric instances. * Input for tool call valid metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolCallValidInput tool_call_valid_input = 19;</code> */ com.google.cloud.aiplatform.v1beta1.ToolCallValidInputOrBuilder getToolCallValidInputOrBuilder(); /** * * * <pre> * Input for tool name match metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolNameMatchInput tool_name_match_input = 20;</code> * * @return Whether the toolNameMatchInput field is set. */ boolean hasToolNameMatchInput(); /** * * * <pre> * Input for tool name match metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolNameMatchInput tool_name_match_input = 20;</code> * * @return The toolNameMatchInput. */ com.google.cloud.aiplatform.v1beta1.ToolNameMatchInput getToolNameMatchInput(); /** * * * <pre> * Input for tool name match metric. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.ToolNameMatchInput tool_name_match_input = 20;</code> */ com.google.cloud.aiplatform.v1beta1.ToolNameMatchInputOrBuilder getToolNameMatchInputOrBuilder(); /** * * * <pre> * Input for tool parameter key match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKeyMatchInput tool_parameter_key_match_input = 21; * </code> * * @return Whether the toolParameterKeyMatchInput field is set. */ boolean hasToolParameterKeyMatchInput(); /** * * * <pre> * Input for tool parameter key match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKeyMatchInput tool_parameter_key_match_input = 21; * </code> * * @return The toolParameterKeyMatchInput. */ com.google.cloud.aiplatform.v1beta1.ToolParameterKeyMatchInput getToolParameterKeyMatchInput(); /** * * * <pre> * Input for tool parameter key match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKeyMatchInput tool_parameter_key_match_input = 21; * </code> */ com.google.cloud.aiplatform.v1beta1.ToolParameterKeyMatchInputOrBuilder getToolParameterKeyMatchInputOrBuilder(); /** * * * <pre> * Input for tool parameter key value match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKVMatchInput tool_parameter_kv_match_input = 22; * </code> * * @return Whether the toolParameterKvMatchInput field is set. */ boolean hasToolParameterKvMatchInput(); /** * * * <pre> * Input for tool parameter key value match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKVMatchInput tool_parameter_kv_match_input = 22; * </code> * * @return The toolParameterKvMatchInput. */ com.google.cloud.aiplatform.v1beta1.ToolParameterKVMatchInput getToolParameterKvMatchInput(); /** * * * <pre> * Input for tool parameter key value match metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.ToolParameterKVMatchInput tool_parameter_kv_match_input = 22; * </code> */ com.google.cloud.aiplatform.v1beta1.ToolParameterKVMatchInputOrBuilder getToolParameterKvMatchInputOrBuilder(); /** * * * <pre> * Translation metrics. * Input for Comet metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CometInput comet_input = 31;</code> * * @return Whether the cometInput field is set. */ boolean hasCometInput(); /** * * * <pre> * Translation metrics. * Input for Comet metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CometInput comet_input = 31;</code> * * @return The cometInput. */ com.google.cloud.aiplatform.v1beta1.CometInput getCometInput(); /** * * * <pre> * Translation metrics. * Input for Comet metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.CometInput comet_input = 31;</code> */ com.google.cloud.aiplatform.v1beta1.CometInputOrBuilder getCometInputOrBuilder(); /** * * * <pre> * Input for Metricx metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.MetricxInput metricx_input = 32;</code> * * @return Whether the metricxInput field is set. */ boolean hasMetricxInput(); /** * * * <pre> * Input for Metricx metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.MetricxInput metricx_input = 32;</code> * * @return The metricxInput. */ com.google.cloud.aiplatform.v1beta1.MetricxInput getMetricxInput(); /** * * * <pre> * Input for Metricx metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.MetricxInput metricx_input = 32;</code> */ com.google.cloud.aiplatform.v1beta1.MetricxInputOrBuilder getMetricxInputOrBuilder(); /** * * * <pre> * Input for trajectory exact match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInput trajectory_exact_match_input = 33; * </code> * * @return Whether the trajectoryExactMatchInput field is set. */ boolean hasTrajectoryExactMatchInput(); /** * * * <pre> * Input for trajectory exact match metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInput trajectory_exact_match_input = 33; * </code> * * @return The trajectoryExactMatchInput. */ com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInput getTrajectoryExactMatchInput(); /** * * * <pre> * Input for trajectory exact match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInput trajectory_exact_match_input = 33; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInputOrBuilder getTrajectoryExactMatchInputOrBuilder(); /** * * * <pre> * Input for trajectory in order match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInput trajectory_in_order_match_input = 34; * </code> * * @return Whether the trajectoryInOrderMatchInput field is set. */ boolean hasTrajectoryInOrderMatchInput(); /** * * * <pre> * Input for trajectory in order match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInput trajectory_in_order_match_input = 34; * </code> * * @return The trajectoryInOrderMatchInput. */ com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInput getTrajectoryInOrderMatchInput(); /** * * * <pre> * Input for trajectory in order match metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInput trajectory_in_order_match_input = 34; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectoryInOrderMatchInputOrBuilder getTrajectoryInOrderMatchInputOrBuilder(); /** * * * <pre> * Input for trajectory match any order metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryAnyOrderMatchInput trajectory_any_order_match_input = 35; * </code> * * @return Whether the trajectoryAnyOrderMatchInput field is set. */ boolean hasTrajectoryAnyOrderMatchInput(); /** * * * <pre> * Input for trajectory match any order metric. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryAnyOrderMatchInput trajectory_any_order_match_input = 35; * </code> * * @return The trajectoryAnyOrderMatchInput. */ com.google.cloud.aiplatform.v1beta1.TrajectoryAnyOrderMatchInput getTrajectoryAnyOrderMatchInput(); /** * * * <pre> * Input for trajectory match any order metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryAnyOrderMatchInput trajectory_any_order_match_input = 35; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectoryAnyOrderMatchInputOrBuilder getTrajectoryAnyOrderMatchInputOrBuilder(); /** * * * <pre> * Input for trajectory precision metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInput trajectory_precision_input = 37; * </code> * * @return Whether the trajectoryPrecisionInput field is set. */ boolean hasTrajectoryPrecisionInput(); /** * * * <pre> * Input for trajectory precision metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInput trajectory_precision_input = 37; * </code> * * @return The trajectoryPrecisionInput. */ com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInput getTrajectoryPrecisionInput(); /** * * * <pre> * Input for trajectory precision metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInput trajectory_precision_input = 37; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInputOrBuilder getTrajectoryPrecisionInputOrBuilder(); /** * * * <pre> * Input for trajectory recall metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.TrajectoryRecallInput trajectory_recall_input = 38; * </code> * * @return Whether the trajectoryRecallInput field is set. */ boolean hasTrajectoryRecallInput(); /** * * * <pre> * Input for trajectory recall metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.TrajectoryRecallInput trajectory_recall_input = 38; * </code> * * @return The trajectoryRecallInput. 
*/ com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInput getTrajectoryRecallInput(); /** * * * <pre> * Input for trajectory recall metric. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.TrajectoryRecallInput trajectory_recall_input = 38; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectoryRecallInputOrBuilder getTrajectoryRecallInputOrBuilder(); /** * * * <pre> * Input for trajectory single tool use metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectorySingleToolUseInput trajectory_single_tool_use_input = 39; * </code> * * @return Whether the trajectorySingleToolUseInput field is set. */ boolean hasTrajectorySingleToolUseInput(); /** * * * <pre> * Input for trajectory single tool use metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectorySingleToolUseInput trajectory_single_tool_use_input = 39; * </code> * * @return The trajectorySingleToolUseInput. */ com.google.cloud.aiplatform.v1beta1.TrajectorySingleToolUseInput getTrajectorySingleToolUseInput(); /** * * * <pre> * Input for trajectory single tool use metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.TrajectorySingleToolUseInput trajectory_single_tool_use_input = 39; * </code> */ com.google.cloud.aiplatform.v1beta1.TrajectorySingleToolUseInputOrBuilder getTrajectorySingleToolUseInputOrBuilder(); /** * * * <pre> * Rubric Based Instruction Following metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.RubricBasedInstructionFollowingInput rubric_based_instruction_following_input = 40; * </code> * * @return Whether the rubricBasedInstructionFollowingInput field is set. */ boolean hasRubricBasedInstructionFollowingInput(); /** * * * <pre> * Rubric Based Instruction Following metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.RubricBasedInstructionFollowingInput rubric_based_instruction_following_input = 40; * </code> * * @return The rubricBasedInstructionFollowingInput. 
*/ com.google.cloud.aiplatform.v1beta1.RubricBasedInstructionFollowingInput getRubricBasedInstructionFollowingInput(); /** * * * <pre> * Rubric Based Instruction Following metric. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.RubricBasedInstructionFollowingInput rubric_based_instruction_following_input = 40; * </code> */ com.google.cloud.aiplatform.v1beta1.RubricBasedInstructionFollowingInputOrBuilder getRubricBasedInstructionFollowingInputOrBuilder(); /** * * * <pre> * Required. The resource name of the Location to evaluate the instances. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string location = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The location. */ java.lang.String getLocation(); /** * * * <pre> * Required. The resource name of the Location to evaluate the instances. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string location = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for location. */ com.google.protobuf.ByteString getLocationBytes(); /** * * * <pre> * Optional. Autorater config used for evaluation. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.AutoraterConfig autorater_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the autoraterConfig field is set. */ boolean hasAutoraterConfig(); /** * * * <pre> * Optional. Autorater config used for evaluation. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.AutoraterConfig autorater_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The autoraterConfig. */ com.google.cloud.aiplatform.v1beta1.AutoraterConfig getAutoraterConfig(); /** * * * <pre> * Optional. Autorater config used for evaluation. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.AutoraterConfig autorater_config = 30 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ com.google.cloud.aiplatform.v1beta1.AutoraterConfigOrBuilder getAutoraterConfigOrBuilder(); com.google.cloud.aiplatform.v1beta1.EvaluateInstancesRequest.MetricInputsCase getMetricInputsCase(); }
hibernate/hibernate-search
32,713
integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/aggregation/TermsAggregationSpecificsIT.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.search.integrationtest.backend.tck.search.aggregation; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.assertj.core.api.Assertions.entry; import static org.hibernate.search.util.impl.integrationtest.common.NormalizationUtils.normalize; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatQuery; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatResult; import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; import org.hibernate.search.engine.backend.common.DocumentReference; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement; import org.hibernate.search.engine.backend.types.Aggregable; import org.hibernate.search.engine.backend.types.Searchable; import org.hibernate.search.engine.search.aggregation.AggregationKey; import org.hibernate.search.engine.search.aggregation.dsl.AggregationFinalStep; import org.hibernate.search.engine.search.aggregation.dsl.CompositeAggregationFrom1AsStep; import org.hibernate.search.engine.search.query.dsl.SearchQueryOptionsStep; import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.AggregationDescriptor; import org.hibernate.search.integrationtest.backend.tck.testsupport.operations.TermsAggregationDescriptor; import org.hibernate.search.integrationtest.backend.tck.testsupport.types.FieldTypeDescriptor; import 
org.hibernate.search.integrationtest.backend.tck.testsupport.types.StandardFieldTypeDescriptor; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldModelsByType; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.ValueWrapper; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.extension.SearchSetupHelper; import org.hibernate.search.util.impl.integrationtest.mapper.stub.BulkIndexer; import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex; import org.hibernate.search.util.impl.test.annotation.PortedFromSearch5; import org.hibernate.search.util.impl.test.annotation.TestForIssue; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.extension.RegisterExtension; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; /** * Tests behavior specific to the terms aggregation on supported field types. * <p> * Behavior common to all single-field aggregations is tested in {@link SingleFieldAggregationBaseIT}. */ class TermsAggregationSpecificsIT<F> { private static final String AGGREGATION_NAME = "aggregationName"; private static final Set<StandardFieldTypeDescriptor<?>> supportedFieldTypes = new LinkedHashSet<>(); private static final List<DataSet<?>> dataSets = new ArrayList<>(); private static final List<Arguments> parameters = new ArrayList<>(); static { AggregationDescriptor aggregationDescriptor = TermsAggregationDescriptor.INSTANCE; for ( StandardFieldTypeDescriptor<?> fieldType : FieldTypeDescriptor.getAllStandard() ) { if ( aggregationDescriptor.getSingleFieldAggregationExpectations( fieldType ).isSupported() ) { supportedFieldTypes.add( fieldType ); DataSet<?> dataSet = new DataSet<>( fieldType ); dataSets.add( dataSet ); parameters.add( Arguments.of( fieldType, dataSet ) ); } } } public static List<? 
extends Arguments> params() { return parameters; } @RegisterExtension public static final SearchSetupHelper setupHelper = SearchSetupHelper.create(); private static final SimpleMappedIndex<IndexBinding> index = SimpleMappedIndex.of( IndexBinding::new ); @BeforeAll static void setup() { setupHelper.start().withIndex( index ).setup(); for ( DataSet<?> dataSet : dataSets ) { dataSet.init(); } } @ParameterizedTest(name = "{0}") @MethodSource("params") void superClassFieldType(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { Class<? super F> superClass = fieldType.getJavaType().getSuperclass(); doTestSuperClassFieldType( superClass, fieldType, dataSet ); } private <S> void doTestSuperClassFieldType(Class<S> superClass, FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<S, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, superClass ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // All documents should be mentioned in the aggregation, even those excluded by the limit/offset containsInAnyOrder( c -> { dataSet.documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) ); }, fieldType ) ); } /** * Check that defining a predicate will affect the aggregation result. 
*/ @ParameterizedTest(name = "{0}") @MethodSource("params") void predicate(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); Map.Entry<F, List<String>> firstTermEntry = dataSet.documentIdPerTerm.entrySet().iterator().next(); assertThatQuery( index.createScope().query() .where( f -> f.id() .matching( firstTermEntry.getValue().get( 0 ) ) .matching( firstTermEntry.getValue().get( 1 ) ) ) .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // Only document 0 should be taken into account by the aggregation containsInAnyOrder( c -> { c.accept( firstTermEntry.getKey(), 2L ); }, fieldType ) ); } /** * Check that defining a limit and offset will <strong>not</strong> affect the aggregation result. */ @ParameterizedTest(name = "{0}") @MethodSource("params") void limitAndOffset(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatResult( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) ) .fetch( 3, 4 ) ) .aggregation( aggregationKey, // All documents should be mentioned in the aggregation, even those excluded by the limit/offset containsInAnyOrder( c -> { dataSet.documentIdPerTerm.forEach( (key, value) -> c.accept( key, (long) value.size() ) ); }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testDefaultSortOrderIsCount") void order_default(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( 
fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // The result should present buckets with decreasing term count containsExactly( c -> { for ( F value : dataSet.valuesInDescendingDocumentCountOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testCountSortOrderDesc") void orderByCountDescending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .orderByCountDescending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // The result should present buckets with decreasing term count containsExactly( c -> { for ( F value : dataSet.valuesInDescendingDocumentCountOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testCountSortOrderAsc") void orderByCountAscending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) 
.orderByCountAscending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // The result should present buckets with increasing term count containsExactly( c -> { for ( F value : dataSet.valuesInAscendingDocumentCountOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void orderByTermDescending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .orderByTermDescending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // The result should present buckets with decreasing term dataSet.values containsExactly( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testAlphabeticalSortOrder") void orderByTermAscending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .orderByTermAscending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // The result should present buckets with increasing term dataSet.values containsExactly( c -> { for ( F value : dataSet.valuesInAscendingOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } 
@ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testZeroCountsExcluded") void minDocumentCount_positive(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .minDocumentCount( 2 ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, // Only buckets with the minimum required document count should appear in the result containsInAnyOrder( c -> { dataSet.documentIdPerTerm.forEach( (key, value) -> { int documentCount = value.size(); if ( documentCount >= 2 ) { c.accept( key, (long) documentCount ); } } ); }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testZeroCountsIncluded") void minDocumentCount_zero(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); Map.Entry<F, List<String>> firstTermEntry = dataSet.documentIdPerTerm.entrySet().iterator().next(); assertThatQuery( index.createScope().query() // Exclude documents containing the first term from matches .where( f -> f.matchAll().except( f.id().matchingAny( firstTermEntry.getValue() ) ) ) .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .minDocumentCount( 0 ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * Buckets with a count of 0 should appear for dataSet.values that are in the index, * but are not encountered in any matching document. 
*/ containsInAnyOrder( c -> { dataSet.documentIdPerTerm.entrySet().stream().skip( 1 ).forEach( e -> { c.accept( e.getKey(), (long) e.getValue().size() ); } ); c.accept( firstTermEntry.getKey(), 0L ); }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void minDocumentCount_zero_noMatch(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( index.createScope().query() // Exclude all documents from the matches .where( f -> f.id().matching( "none" ) ) .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .minDocumentCount( 0 ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * All indexed terms should appear in a bucket, in ascending value order, with a count of zero. */ containsInAnyOrder( c -> { for ( F value : dataSet.valuesInAscendingOrder ) { c.accept( value, 0L ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void minDocumentCount_zero_noMatch_orderByTermDescending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( index.createScope().query() // Exclude all documents from the matches .where( f -> f.id().matching( "none" ) ) .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .minDocumentCount( 0 ) .orderByTermDescending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * All indexed terms should appear in a bucket, in descending value order, with a count of zero. 
*/ containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, 0L ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void minDocumentCount_negative(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; assertThatThrownBy( () -> index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() ) .minDocumentCount( -1 ) ) .isInstanceOf( IllegalArgumentException.class ) .hasMessageContaining( "'minDocumentCount'" ) .hasMessageContaining( "must be positive or zero" ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-776") @PortedFromSearch5(original = "org.hibernate.search.test.query.facet.SimpleFacetingTest.testMaxFacetCounts") void maxTermCount_positive(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( 1 ) ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * Only the bucket with the most documents should be returned. */ containsInAnyOrder( c -> { F valueWithMostDocuments = dataSet.valuesInDescendingDocumentCountOrder.get( 0 ); c.accept( valueWithMostDocuments, (long) dataSet.documentIdPerTerm.get( valueWithMostDocuments ).size() ); }, fieldType ) ); } /** * Test maxTermCount with a non-default sort by ascending term value. * The returned terms should be the "lowest" dataSet.values. 
*/ @ParameterizedTest(name = "{0}") @MethodSource("params") void maxTermCount_positive_orderByTermAscending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( 1 ) .orderByTermAscending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * Only the bucket with the "lowest" value should be returned. */ containsInAnyOrder( c -> { F lowestValue = dataSet.valuesInAscendingOrder.get( 0 ); c.accept( lowestValue, (long) dataSet.documentIdPerTerm.get( lowestValue ).size() ); }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void maxTermCount_positive_orderByCountAscending(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( 1 ) .orderByCountAscending() ) .routing( dataSet.name ) .toQuery() ) .aggregation( aggregationKey, /* * Only the bucket with the least documents should be returned. 
*/ containsInAnyOrder( c -> { F valueWithLeastDocuments = dataSet.valuesInAscendingDocumentCountOrder.get( 0 ); c.accept( valueWithLeastDocuments, (long) dataSet.documentIdPerTerm.get( valueWithLeastDocuments ).size() ); }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void maxTermCount_zero(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; assertThatThrownBy( () -> index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( 0 ) ) .isInstanceOf( IllegalArgumentException.class ) .hasMessageContaining( "'maxTermCount'" ) .hasMessageContaining( "must be strictly positive" ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void maxTermCount_negative(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; assertThatThrownBy( () -> index.createScope().aggregation().terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( -1 ) ) .isInstanceOf( IllegalArgumentException.class ) .hasMessageContaining( "'maxTermCount'" ) .hasMessageContaining( "must be strictly positive" ); } @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-4544") void maxTermCount_integerMaxValue(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( Integer.MAX_VALUE ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. 
containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } // This is interesting even if we already test Integer.MAX_VALUE (see above), // because Lucene has some hardcoded limits for PriorityQueue sizes, // somewhere around 2147483631, which is lower than Integer.MAX_VALUE. @ParameterizedTest(name = "{0}") @MethodSource("params") @TestForIssue(jiraKey = "HSEARCH-4544") void maxTermCount_veryLarge(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .maxTermCount( 2_000_000_000 ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void terms_explicitDocCount(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, Long>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) .value( f.count().documents() ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. 
containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, (long) dataSet.documentIdPerTerm.get( value ).size() ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void terms_min(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { assumeTrue( fieldType.supportsMetricAggregation(), "Since the value is a metric aggregation on the same field, we want to be sure that only those fields that support it are included." ); String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, F>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) // while maybe silly as min/max == the same term as the key it is here just to test the nesting and aggregations: .value( (AggregationFinalStep<F>) f.min().field( fieldPath, fieldType.getJavaType() ) ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, fieldType.normalize( value ) ); } }, fieldType ) ); } @ParameterizedTest(name = "{0}") @MethodSource("params") void terms_max(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { assumeTrue( fieldType.supportsMetricAggregation(), "Since the value is a metric aggregation on the same field, we want to be sure that only those fields that support it are included." 
); String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, F>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) // while maybe silly as min/max == the same term as the key it is here just to test the nesting and aggregations: .value( (AggregationFinalStep<F>) f.max().field( fieldPath, fieldType.getJavaType() ) ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, fieldType.normalize( value ) ); } }, fieldType ) ); } @SuppressWarnings("unchecked") // for the eclipse compiler @ParameterizedTest(name = "{0}") @MethodSource("params") void terms_composite(FieldTypeDescriptor<F, ?> fieldType, DataSet<F> dataSet) { assumeTrue( fieldType.supportsMetricAggregation(), "Since the value is a metric aggregation on the same field, we want to be sure that only those fields that support it are included." ); String fieldPath = index.binding().fieldModels.get( fieldType ).relativeFieldName; AggregationKey<Map<F, F>> aggregationKey = AggregationKey.of( AGGREGATION_NAME ); assertThatQuery( matchAllQuery() .aggregation( aggregationKey, f -> f.terms().field( fieldPath, fieldType.getJavaType() ) // while maybe silly as min/max == the same term as the key it is here just to test the nesting and aggregations: .value( ( (CompositeAggregationFrom1AsStep<F>) f.composite() // cast here is for the eclipse compiler ... .from( f.max().field( fieldPath, fieldType.getJavaType() ) ) ) .as( Function.<F>identity() ) ) ) .routing( dataSet.name ) ) .aggregation( aggregationKey, // All buckets should be returned. 
containsInAnyOrder( c -> { for ( F value : dataSet.valuesInDescendingOrder ) { c.accept( value, fieldType.normalize( value ) ); } }, fieldType ) ); } private SearchQueryOptionsStep<Object, ?, DocumentReference, ?, ?, ?> matchAllQuery() { return index.createScope().query().where( f -> f.matchAll() ); } @SuppressWarnings("unchecked") private <K, V> Consumer<Map<F, V>> containsExactly(Consumer<BiConsumer<F, V>> expectationBuilder, FieldTypeDescriptor<F, ?> fieldType) { List<Map.Entry<F, V>> expected = new ArrayList<>(); expectationBuilder.accept( (k, v) -> expected.add( entry( fieldType.toExpectedDocValue( k ), v ) ) ); return actual -> assertThat( normalize( actual ) ) .containsExactly( normalize( expected ).toArray( new Map.Entry[0] ) ); } @SuppressWarnings("unchecked") private <K, V> Consumer<Map<K, V>> containsInAnyOrder(Consumer<BiConsumer<F, V>> expectationBuilder, FieldTypeDescriptor<F, ?> fieldType) { List<Map.Entry<F, V>> expected = new ArrayList<>(); expectationBuilder.accept( (k, v) -> expected.add( entry( fieldType.toExpectedDocValue( k ), v ) ) ); return actual -> assertThat( normalize( actual ).entrySet() ) .containsExactlyInAnyOrder( normalize( expected ).toArray( new Map.Entry[0] ) ); } private static class DataSet<F> { final FieldTypeDescriptor<F, ?> fieldType; final String name; final Map<F, List<String>> documentIdPerTerm; final List<F> valuesInAscendingOrder; final List<F> valuesInDescendingOrder; final List<F> valuesInAscendingDocumentCountOrder; final List<F> valuesInDescendingDocumentCountOrder; private DataSet(FieldTypeDescriptor<F, ?> fieldType) { this.fieldType = fieldType; this.name = fieldType.getUniqueName(); this.documentIdPerTerm = new LinkedHashMap<>(); this.valuesInAscendingOrder = fieldType.getAscendingUniqueTermValues().getSingle(); this.valuesInDescendingOrder = new ArrayList<>( valuesInAscendingOrder ); Collections.reverse( valuesInDescendingOrder ); this.valuesInDescendingDocumentCountOrder = new ArrayList<>( 
valuesInAscendingOrder ); /* * Mess with the value order, because some tests would be pointless * if the document count order was the same as (or the opposite of) the value order */ valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) ); valuesInDescendingDocumentCountOrder.remove( 0 ); valuesInDescendingDocumentCountOrder.add( valuesInDescendingDocumentCountOrder.get( 0 ) ); valuesInDescendingDocumentCountOrder.remove( 0 ); this.valuesInAscendingDocumentCountOrder = new ArrayList<>( valuesInDescendingDocumentCountOrder ); Collections.reverse( valuesInAscendingDocumentCountOrder ); // Simple dataset: strictly decreasing number of documents for each term int documentIdAsInteger = 0; int numberOfDocuments = valuesInDescendingDocumentCountOrder.size(); for ( F value : valuesInDescendingDocumentCountOrder ) { ArrayList<String> documentIdsForTerm = new ArrayList<>(); documentIdPerTerm.put( value, documentIdsForTerm ); for ( int i = 0; i < numberOfDocuments; i++ ) { String documentId = name + "_document_" + documentIdAsInteger; ++documentIdAsInteger; documentIdsForTerm.add( documentId ); } --numberOfDocuments; } } private void init() { BulkIndexer indexer = index.bulkIndexer(); for ( Map.Entry<F, List<String>> entry : documentIdPerTerm.entrySet() ) { F value = entry.getKey(); for ( String documentId : entry.getValue() ) { indexer.add( documentId, name, document -> { document.addValue( index.binding().fieldModels.get( fieldType ).reference, value ); document.addValue( index.binding().fieldWithConverterModels.get( fieldType ).reference, value ); } ); } } indexer.add( name + "_document_empty", name, document -> {} ); indexer.join(); } } private static class IndexBinding { final SimpleFieldModelsByType fieldModels; final SimpleFieldModelsByType fieldWithConverterModels; final SimpleFieldModelsByType fieldWithAggregationDisabledModels; IndexBinding(IndexSchemaElement root) { fieldModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, 
root, "", c -> c.aggregable( Aggregable.YES ) .searchable( Searchable.NO ) // Terms aggregations should not need this ); fieldWithConverterModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "converted_", c -> c.aggregable( Aggregable.YES ) .dslConverter( ValueWrapper.class, ValueWrapper.toDocumentValueConverter() ) .projectionConverter( ValueWrapper.class, ValueWrapper.fromDocumentValueConverter() ) ); fieldWithAggregationDisabledModels = SimpleFieldModelsByType.mapAll( supportedFieldTypes, root, "nonAggregable_", c -> c.aggregable( Aggregable.NO ) ); } } }
apache/hive
35,333
ql/src/test/org/apache/hadoop/hive/ql/exec/TestFileSinkOperator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec; import org.junit.After; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.common.ValidWriteIdList; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; import org.apache.hadoop.hive.ql.CompilationOpContext; import org.apache.hadoop.hive.ql.io.AcidInputFormat; import org.apache.hadoop.hive.ql.io.AcidOutputFormat; import org.apache.hadoop.hive.ql.io.AcidUtils; import org.apache.hadoop.hive.ql.io.RecordIdentifier; import org.apache.hadoop.hive.ql.io.RecordUpdater; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import 
org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.stats.StatsAggregator; import org.apache.hadoop.hive.ql.stats.StatsCollectionContext; import org.apache.hadoop.hive.ql.stats.StatsPublisher; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.InputSplit; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.RecordReader; import org.apache.hadoop.mapred.RecordWriter; import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.util.Progressable; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.mockito.Mockito; import java.io.DataInput; import java.io.DataOutput; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.stream.Collectors; /** * Tests for {@link org.apache.hadoop.hive.ql.exec.FileSinkOperator} */ public class TestFileSinkOperator { private static String PARTCOL_NAME = "partval"; private static final String tmpPrefix = "-tmp."; static final private Logger LOG = LoggerFactory.getLogger(TestFileSinkOperator.class.getName()); private static File tmpdir; 
private static TableDesc nonAcidTableDescriptor; private static TableDesc acidTableDescriptor; private static ObjectInspector inspector; private static List<Row> rows; private Path basePath; private JobConf jc; @BeforeClass public static void classSetup() { Properties properties = new Properties(); properties.setProperty(serdeConstants.SERIALIZATION_LIB, TFSOSerDe.class.getName()); properties.setProperty(hive_metastoreConstants.META_TABLE_NAME, "tfs"); nonAcidTableDescriptor = new TableDesc(TFSOInputFormat.class, TFSOOutputFormat.class, properties); properties.setProperty(serdeConstants.LIST_COLUMNS,"data"); properties = new Properties(properties); properties.setProperty(hive_metastoreConstants.BUCKET_COUNT, "1"); acidTableDescriptor = new TableDesc(TFSOInputFormat.class, TFSOOutputFormat.class, properties); tmpdir = new File(System.getProperty("java.io.tmpdir") + System.getProperty("file.separator") + "testFileSinkOperator"); tmpdir.mkdir(); tmpdir.deleteOnExit(); } @Test public void testNonAcidWrite() throws Exception { setBasePath("write"); setupData(DataFormat.WITH_PARTITION_VALUE); FileSinkOperator op = getFileSink(AcidUtils.Operation.NOT_ACID, false, 0); processRows(op); confirmOutput(DataFormat.WITH_PARTITION_VALUE); } @Test public void testInsert() throws Exception { setBasePath("insert"); setupData(DataFormat.WITH_PARTITION_VALUE); FileSinkOperator op = getFileSink(AcidUtils.Operation.INSERT, false, 1); processRows(op); Assert.assertEquals("10", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_PARTITION_VALUE); } @Test public void testUpdate() throws Exception { setBasePath("update"); setupData(DataFormat.WITH_RECORD_ID); FileSinkOperator op = getFileSink(AcidUtils.Operation.UPDATE, false, 2); processRows(op); Assert.assertEquals("0", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_RECORD_ID); } @Test public void testDelete() throws Exception { setBasePath("delete"); 
setupData(DataFormat.WITH_RECORD_ID); FileSinkOperator op = getFileSink(AcidUtils.Operation.DELETE, false, 2); processRows(op); Assert.assertEquals("-10", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_RECORD_ID); } @Test public void testNonAcidDynamicPartitioning() throws Exception { setBasePath("writeDP"); setupData(DataFormat.WITH_PARTITION_VALUE); FileSinkOperator op = getFileSink(AcidUtils.Operation.NOT_ACID, true, 0); processRows(op); confirmOutput(DataFormat.WITH_PARTITION_VALUE); } @Test public void testNonAcidRemoveDuplicate() throws Exception { setBasePath("writeDuplicate"); setupData(DataFormat.WITH_PARTITION_VALUE); FileSinkDesc desc = (FileSinkDesc) getFileSink(AcidUtils.Operation.NOT_ACID, true, 0).getConf().clone(); Path linkedDir = desc.getDirName(); desc.setLinkedFileSink(true); desc.setDirName(new Path(linkedDir, AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "0")); JobConf jobConf = new JobConf(jc); jobConf.set("hive.execution.engine", "tez"); jobConf.set("mapred.task.id", "000000_0"); FileSinkOperator op1 = (FileSinkOperator)OperatorFactory.get(new CompilationOpContext(), FileSinkDesc.class); op1.setConf(desc); op1.initialize(jobConf, new ObjectInspector[]{inspector}); JobConf jobConf2 = new JobConf(jobConf); jobConf2.set("mapred.task.id", "000000_1"); FileSinkOperator op2 = (FileSinkOperator)OperatorFactory.get( new CompilationOpContext(), FileSinkDesc.class); op2.setConf(desc); op2.initialize(jobConf2, new ObjectInspector[]{inspector}); // Another sub-query in union JobConf jobConf3 = new JobConf(jobConf); jobConf3.set("mapred.task.id", "000001_0"); FileSinkOperator op3 = (FileSinkOperator)OperatorFactory.get( new CompilationOpContext(), FileSinkDesc.class); FileSinkDesc sinkDesc = (FileSinkDesc) desc.clone(); sinkDesc.setDirName(new Path(linkedDir, AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "1")); op3.setConf(sinkDesc); op3.initialize(jobConf3, new ObjectInspector[]{inspector}); JobConf jobConf4 
= new JobConf(jobConf); jobConf4.set("mapred.task.id", "000001_1"); FileSinkOperator op4 = (FileSinkOperator)OperatorFactory.get( new CompilationOpContext(), FileSinkDesc.class); op4.setConf(sinkDesc); op4.initialize(jobConf4, new ObjectInspector[]{inspector}); for (Object r : rows) { op1.process(r, 0); op2.process(r, 0); op3.process(r, 0); op4.process(r, 0); } op1.close(false); // Assume op2 also ends successfully, this happens in different containers op2.close(false); op3.close(false); op4.close(false); Path[] paths = findFilesInPath(linkedDir); // = findFilesInBasePath() # use findFilesInBasePath before the fix Set<String> fileNames = Arrays.stream(paths) .filter(path -> path.getParent().toString().endsWith("partval=Monday/HIVE_UNION_SUBDIR_0")) .map(path -> path.getName()) .collect(Collectors.toSet()); Assert.assertEquals("Two result files are expected", 2, fileNames.size()); Assert.assertTrue("000000_1 file is expected", fileNames.contains("000000_1")); Assert.assertTrue("000000_0 file is expected", fileNames.contains("000000_0")); fileNames = Arrays.stream(paths) .filter(path -> path.getParent().toString().endsWith("partval=Monday/HIVE_UNION_SUBDIR_1")) .map(path -> path.getName()) .collect(Collectors.toSet()); Assert.assertEquals("Two result files are expected", 2, fileNames.size()); Assert.assertTrue("000001_0 file is expected", fileNames.contains("000001_0")); Assert.assertTrue("000001_1 file is expected", fileNames.contains("000001_1")); // Close op3 first to see if it can deduplicate the result under HIVE_UNION_SUBDIR_0 op3.jobCloseOp(jobConf, true); // This happens in HiveServer2 when the job is finished, the job will call // jobCloseOp to end his operators. For the FileSinkOperator, a deduplication on the // output files may happen so that only one output file is left for each yarn task. 
op1.jobCloseOp(jobConf, true); List<Path> resultFiles = new ArrayList<Path>(); String linkedDirPath = linkedDir.toUri().getPath(); recurseOnPath(linkedDir, linkedDir.getFileSystem(jc), resultFiles); List<Path> mondays = resultFiles.stream() .filter(path -> path.getParent().toUri().getPath() .equals(linkedDirPath + "/partval=Monday/HIVE_UNION_SUBDIR_0")) .collect(Collectors.toList()); Assert.assertEquals("Only 1 file should be here after cleaning", 1, mondays.size()); Assert.assertEquals("000000_1 file is expected", "000000_1", mondays.get(0).getName()); List<Path> subdir1 = resultFiles.stream() .filter(path -> path.getParent().getName().equals("HIVE_UNION_SUBDIR_1")).sorted() .collect(Collectors.toList()); Assert.assertEquals("Two partitions expected", 2, subdir1.size()); Path monday = subdir1.get(0), tuesday = subdir1.get(1); Assert.assertEquals("Only 1 file left under the partition after deduplication", monday.toUri().getPath(), linkedDirPath + "/partval=Monday/HIVE_UNION_SUBDIR_1/000001_1"); Assert.assertEquals("Only 1 file left under the partition after deduplication", tuesday.toUri().getPath(), linkedDirPath + "/partval=Tuesday/HIVE_UNION_SUBDIR_1/000001_1"); // Confirm the output confirmOutput(DataFormat.WITH_PARTITION_VALUE, resultFiles.stream() .filter(p -> p.getParent().getName().equals("HIVE_UNION_SUBDIR_0")).sorted() .collect(Collectors.toList()).toArray(new Path[0])); confirmOutput(DataFormat.WITH_PARTITION_VALUE, subdir1.toArray(new Path[0])); } @Test public void testInsertDynamicPartitioning() throws Exception { setBasePath("insertDP"); setupData(DataFormat.WITH_PARTITION_VALUE); FileSinkOperator op = getFileSink(AcidUtils.Operation.INSERT, true, 1); processRows(op); // We only expect 5 here because we'll get whichever of the partitions published its stats // last. 
Assert.assertEquals("5", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_PARTITION_VALUE); } @Test public void testUpdateDynamicPartitioning() throws Exception { setBasePath("updateDP"); setupData(DataFormat.WITH_RECORD_ID_AND_PARTITION_VALUE); FileSinkOperator op = getFileSink(AcidUtils.Operation.UPDATE, true, 2); processRows(op); Assert.assertEquals("0", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_RECORD_ID_AND_PARTITION_VALUE); } @Test public void testDeleteDynamicPartitioning() throws Exception { setBasePath("deleteDP"); setupData(DataFormat.WITH_RECORD_ID); FileSinkOperator op = getFileSink(AcidUtils.Operation.DELETE, true, 2); processRows(op); // We only expect -5 here because we'll get whichever of the partitions published its stats // last. Assert.assertEquals("-5", TFSOStatsPublisher.stats.get(StatsSetupConst.ROW_COUNT)); confirmOutput(DataFormat.WITH_RECORD_ID); } @Before public void setup() throws Exception { jc = new JobConf(); jc.set(HiveConf.ConfVars.HIVE_STATS_DEFAULT_PUBLISHER.varname, TFSOStatsPublisher.class.getName()); jc.set(HiveConf.ConfVars.HIVE_STATS_DEFAULT_AGGREGATOR.varname, TFSOStatsAggregator.class.getName()); jc.set(HiveConf.ConfVars.HIVE_STATS_DBCLASS.varname, "custom"); } @After public void afterTest() throws Exception { Path parent = basePath.getParent(); String last = basePath.getName(); FileSystem fs = basePath.getFileSystem(jc); fs.delete(basePath, true); fs.delete(new Path(parent, "_tmp." + last), true); fs.delete(new Path(parent, "_task_tmp." 
+ last), true); } private void setBasePath(String testName) { basePath = new Path(new File(tmpdir, testName).getPath()); } private enum DataFormat {WITH_RECORD_ID, WITH_PARTITION_VALUE, WITH_RECORD_ID_AND_PARTITION_VALUE}; private void setupData(DataFormat format) { Class<?> rType; switch (format) { case WITH_PARTITION_VALUE: rType = RowWithPartVal.class; break; case WITH_RECORD_ID: rType = RowWithRecID.class; break; case WITH_RECORD_ID_AND_PARTITION_VALUE: rType = RowWithPartNRecID.class; break; default: throw new RuntimeException("Unknown type"); } inspector = ObjectInspectorFactory.getReflectionObjectInspector (rType, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); rows = new ArrayList<Row>(); Row r; for (int i = 0; i < 10; i++) { switch (format) { case WITH_PARTITION_VALUE: r = new RowWithPartVal( new Text("mary had a little lamb"), (i < 5) ? new Text("Monday") : new Text("Tuesday")); break; case WITH_RECORD_ID: r = new RowWithRecID(new RecordIdentifier(1, 1, i), (i < 5) ? new Text("Monday") : new Text("Tuesday")); break; case WITH_RECORD_ID_AND_PARTITION_VALUE: r = new RowWithPartNRecID( new Text("its fleect was white as snow"), (i < 5) ? 
new Text("Monday") : new Text("Tuesday"), new RecordIdentifier(1, 1, i)); break; default: throw new RuntimeException("Unknown data format"); } rows.add(r); } } private FileSinkOperator getFileSink(AcidUtils.Operation writeType, boolean dynamic, long writeId) throws IOException, HiveException { TableDesc tableDesc = null; switch (writeType) { case DELETE: case UPDATE: case INSERT: tableDesc = acidTableDescriptor; break; case NOT_ACID: tableDesc = nonAcidTableDescriptor; break; } FileSinkDesc desc = null; if (dynamic) { ArrayList<ExprNodeDesc> partCols = new ArrayList<ExprNodeDesc>(1); partCols.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, PARTCOL_NAME, "a", true)); Map<String, String> partColMap= new LinkedHashMap<String, String>(1); partColMap.put(PARTCOL_NAME, null); DynamicPartitionCtx dpCtx = new DynamicPartitionCtx(partColMap, "Sunday", 100); //todo: does this need the finalDestination? desc = new FileSinkDesc(basePath, tableDesc, false, 1, false, false, 1, 1, partCols, dpCtx, null, false, false, false, false, false, writeType, false); } else { desc = new FileSinkDesc(basePath, tableDesc, false); } desc.setStatsAggPrefix(basePath.toString()); desc.setWriteType(writeType); desc.setGatherStats(true); if (writeId > 0) { desc.setTableWriteId(writeId); } if (writeType != AcidUtils.Operation.NOT_ACID) { desc.setTableWriteId(1L); } FileSinkOperator op = (FileSinkOperator)OperatorFactory.get( new CompilationOpContext(), FileSinkDesc.class); op.setConf(desc); op.initialize(jc, new ObjectInspector[]{inspector}); return op; } private void processRows(FileSinkOperator op) throws HiveException { for (Object r : rows) op.process(r, 0); op.jobCloseOp(jc, true); op.close(false); } private void confirmOutput(DataFormat rType) throws IOException, SerDeException, CloneNotSupportedException { confirmOutput(rType, findFilesInBasePath()); } private void confirmOutput(DataFormat rType, Path[] paths) throws IOException, SerDeException, CloneNotSupportedException { 
TFSOInputFormat input = new TFSOInputFormat(rType); FileInputFormat.setInputPaths(jc, paths); InputSplit[] splits = input.getSplits(jc, 1); RecordReader<NullWritable, Row> reader = input.getRecordReader(splits[0], jc, Mockito.mock(Reporter.class)); NullWritable key = reader.createKey(); Row value = reader.createValue(); List<Row> results = new ArrayList<Row>(rows.size()); List<Row> sortedRows = new ArrayList<Row>(rows.size()); for (int i = 0; i < rows.size(); i++) { Assert.assertTrue(reader.next(key, value)); results.add(value.clone()); sortedRows.add(rows.get(i)); } Assert.assertFalse(reader.next(key, value)); Collections.sort(results); Collections.sort(sortedRows); for (int i = 0; i < rows.size(); i++) { Assert.assertTrue(sortedRows.get(i).equals(results.get(i))); } } private Path[] findFilesInBasePath() throws IOException { Path parent = basePath.getParent(); String last = basePath.getName(); Path tmpPath = new Path(parent, tmpPrefix + last); FileSystem fs = basePath.getFileSystem(jc); List<Path> paths = new ArrayList<Path>(); recurseOnPath(tmpPath, fs, paths); return paths.toArray(new Path[paths.size()]); } private Path[] findFilesInPath(Path path) throws IOException { FileSystem fs = path.getFileSystem(jc); List<Path> paths = new ArrayList<Path>(); recurseOnPath(path, fs, paths); return paths.toArray(new Path[paths.size()]); } private void recurseOnPath(Path p, FileSystem fs, List<Path> paths) throws IOException { if (fs.getFileStatus(p).isDir()) { FileStatus[] stats = fs.listStatus(p); for (FileStatus stat : stats) recurseOnPath(stat.getPath(), fs, paths); } else { paths.add(p); } } public static interface Row extends WritableComparable<Row> { Row clone() throws CloneNotSupportedException; } private static class RowWithRecID implements Row { private RecordIdentifier recId; private Text partVal; public RowWithRecID() { } public RowWithRecID(RecordIdentifier recId, Text partVal) { super(); this.recId = recId; this.partVal = partVal; } @Override public Row 
clone() throws CloneNotSupportedException { return new RowWithRecID(this.recId, this.partVal); } @Override public void write(DataOutput dataOutput) throws IOException { if (partVal == null) { dataOutput.writeBoolean(false); } else { dataOutput.writeBoolean(true); partVal.write(dataOutput); } if (recId == null) { dataOutput.writeBoolean(false); } else { dataOutput.writeBoolean(true); recId.write(dataOutput); } } @Override public void readFields(DataInput dataInput) throws IOException { boolean notNull = dataInput.readBoolean(); if (notNull) { partVal = new Text(); partVal.readFields(dataInput); } notNull = dataInput.readBoolean(); if (notNull) { recId = new RecordIdentifier(); recId.readFields(dataInput); } } @Override public int compareTo(Row row) { RowWithRecID other = (RowWithRecID) row; if (recId == null && other.recId == null) { return comparePartVal(other); } else if (recId == null) { return -1; } else { int rc = recId.compareTo(other.recId); if (rc == 0) return comparePartVal(other); else return rc; } } private int comparePartVal(RowWithRecID other) { return partVal.compareTo(other.partVal); } @Override public boolean equals(Object obj) { return compareTo((RowWithRecID)obj) == 0; } } private static class RowWithPartVal implements Row { public RowWithPartVal(Text data, Text partVal) { super(); this.data = data; this.partVal = partVal; } public RowWithPartVal() { } private Text data; private Text partVal; @Override public Row clone() throws CloneNotSupportedException { return new RowWithPartVal(this.data, this.partVal); } @Override public void write(DataOutput dataOutput) throws IOException { data.write(dataOutput); if (partVal == null) { dataOutput.writeBoolean(false); } else { dataOutput.writeBoolean(true); partVal.write(dataOutput); } } @Override public void readFields(DataInput dataInput) throws IOException { data = new Text(); data.readFields(dataInput); boolean notNull = dataInput.readBoolean(); if (notNull) { partVal = new Text(); 
partVal.readFields(dataInput); } } @Override public int compareTo(Row row) { RowWithPartVal other = (RowWithPartVal) row; if (partVal == null && other.partVal == null) { return compareData(other); } else if (partVal == null) { return -1; } else { int rc = partVal.compareTo(other.partVal); if (rc == 0) return compareData(other); else return rc; } } private int compareData(RowWithPartVal other) { if (data == null && other.data == null) return 0; else if (data == null) return -1; else return data.compareTo(other.data); } @Override public boolean equals(Object obj) { if (obj instanceof RowWithPartVal) { RowWithPartVal other = (RowWithPartVal) obj; return compareTo(other) == 0; } else { return false; } } } private static class RowWithPartNRecID implements Row { private RecordIdentifier recId; private Text data; private Text partVal; RowWithPartNRecID() { this(null, null, null); } RowWithPartNRecID(Text t, Text pv, RecordIdentifier ri) { data = t; partVal = pv; recId = ri; } @Override public RowWithPartNRecID clone() throws CloneNotSupportedException { return new RowWithPartNRecID(this.data, this.partVal, this.recId); } @Override public void write(DataOutput dataOutput) throws IOException { data.write(dataOutput); if (partVal == null) { dataOutput.writeBoolean(false); } else { dataOutput.writeBoolean(true); partVal.write(dataOutput); } if (recId == null) { dataOutput.writeBoolean(false); } else { dataOutput.writeBoolean(true); recId.write(dataOutput); } } @Override public void readFields(DataInput dataInput) throws IOException { data = new Text(); data.readFields(dataInput); boolean notNull = dataInput.readBoolean(); if (notNull) { partVal = new Text(); partVal.readFields(dataInput); } notNull = dataInput.readBoolean(); if (notNull) { recId = new RecordIdentifier(); recId.readFields(dataInput); } } @Override public boolean equals(Object obj) { if (obj instanceof RowWithPartNRecID) { RowWithPartNRecID other = (RowWithPartNRecID) obj; if (data == null && other.data == 
null) return checkPartVal(other); else if (data == null) return false; else if (data.equals(other.data)) return checkPartVal(other); else return false; } else { return false; } } private boolean checkPartVal(RowWithPartNRecID other) { if (partVal == null && other.partVal == null) return checkRecId(other); else if (partVal == null) return false; else if (partVal.equals(other.partVal)) return checkRecId(other); else return false; } private boolean checkRecId(RowWithPartNRecID other) { if (recId == null && other.recId == null) return true; else if (recId == null) return false; else return recId.equals(other.recId); } @Override public int compareTo(Row row) { RowWithPartNRecID other = (RowWithPartNRecID) row; if (recId == null && other.recId == null) { return comparePartVal(other); } else if (recId == null) { return -1; } else { int rc = recId.compareTo(other.recId); if (rc == 0) return comparePartVal(other); else return rc; } } private int comparePartVal(RowWithPartNRecID other) { if (partVal == null && other.partVal == null) { return compareData(other); } else if (partVal == null) { return -1; } else { int rc = partVal.compareTo(other.partVal); if (rc == 0) return compareData(other); else return rc; } } private int compareData(RowWithPartNRecID other) { if (data == null && other.data == null) return 0; else if (data == null) return -1; else return data.compareTo(other.data); } } private static class TFSOInputFormat extends FileInputFormat<NullWritable, Row> implements AcidInputFormat<NullWritable, Row> { FSDataInputStream in[] = null; int readingFrom = -1; DataFormat rType; public TFSOInputFormat(DataFormat rType) { this.rType = rType; } @Override public RecordReader<NullWritable, Row> getRecordReader( InputSplit inputSplit, JobConf entries, Reporter reporter) throws IOException { if (in == null) { Path paths[] = FileInputFormat.getInputPaths(entries); in = new FSDataInputStream[paths.length]; FileSystem fs = paths[0].getFileSystem(entries); for (int i = 0; i < 
paths.length; i++) { in[i] = fs.open(paths[i]); } readingFrom = 0; } return new RecordReader<NullWritable, Row>() { @Override public boolean next(NullWritable nullWritable, Row tfsoRecord) throws IOException { try { tfsoRecord.readFields(in[readingFrom]); return true; } catch (EOFException e) { in[readingFrom].close(); if (++readingFrom >= in.length) return false; else return next(nullWritable, tfsoRecord); } } @Override public NullWritable createKey() { return NullWritable.get(); } @Override public Row createValue() { switch (rType) { case WITH_RECORD_ID_AND_PARTITION_VALUE: return new RowWithPartNRecID(); case WITH_PARTITION_VALUE: return new RowWithPartVal(); case WITH_RECORD_ID: return new RowWithRecID(); default: throw new RuntimeException("Unknown row Type"); } } @Override public long getPos() throws IOException { return 0L; } @Override public void close() throws IOException { } @Override public float getProgress() throws IOException { return 0.0f; } }; } @Override public RowReader<Row> getReader(InputSplit split, Options options) throws IOException { return null; } @Override public RawReader<Row> getRawReader(Configuration conf, boolean collapseEvents, int bucket, ValidWriteIdList validWriteIdList, Path baseDirectory, Path[] deltaDirectory, Map<String,Integer> deltaToAttemptId) throws IOException { return null; } @Override public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files) throws IOException { return false; } } public static class TFSOOutputFormat extends FileOutputFormat<NullWritable, Row> implements AcidOutputFormat<NullWritable, Row> { List<Row> records = new ArrayList<>(); long numRecordsAdded = 0; FSDataOutputStream out = null; @Override public RecordUpdater getRecordUpdater(final Path path, final Options options) throws IOException { return new RecordUpdater() { @Override public void insert(long currentWriteId, Object row) throws IOException { addRow(row); numRecordsAdded++; } @Override public void update(long 
currentWriteId, Object row) throws IOException { addRow(row); } @Override public void delete(long currentWriteId, Object row) throws IOException { addRow(row); numRecordsAdded--; } private void addRow(Object row) { assert row instanceof Row : "Expected Row but got " + row.getClass().getName(); records.add((Row)row); } @Override public void flush() throws IOException { if (out == null) { FileSystem fs = path.getFileSystem(options.getConfiguration()); out = fs.create(path); } for (Writable r : records) r.write(out); records.clear(); out.flush(); } @Override public void close(boolean abort) throws IOException { flush(); out.close(); } @Override public SerDeStats getStats() { SerDeStats stats = new SerDeStats(); stats.setRowCount(numRecordsAdded); return stats; } @Override public long getBufferedRowCount() { return records.size(); } @Override public Path getUpdatedFilePath() { return null; } }; } @Override public FileSinkOperator.RecordWriter getRawRecordWriter(Path path, Options options) throws IOException { return null; } @Override public FileSinkOperator.RecordWriter getHiveRecordWriter(final JobConf jc, final Path finalOutPath, Class<? 
extends Writable> valueClass, boolean isCompressed, Properties tableProperties, Progressable progress) throws IOException { return new FileSinkOperator.RecordWriter() { @Override public void write(Writable w) throws IOException { Assert.assertTrue(w instanceof Row); records.add((Row)w); } @Override public void close(boolean abort) throws IOException { if (out == null) { FileSystem fs = finalOutPath.getFileSystem(jc); out = fs.create(finalOutPath); } for (Writable r : records) r.write(out); records.clear(); out.flush(); out.close(); } }; } @Override public RecordWriter<NullWritable, Row> getRecordWriter( FileSystem fileSystem, JobConf entries, String s, Progressable progressable) throws IOException { return null; } @Override public void checkOutputSpecs(FileSystem fileSystem, JobConf entries) throws IOException { } } public static class TFSOSerDe extends AbstractSerDe { @Override public void initialize(Configuration configuration, Properties tableProperties, Properties partitionProperties) throws SerDeException { } @Override public Class<? 
extends Writable> getSerializedClass() { return RowWithPartNRecID.class; } @Override public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException { assert obj instanceof Row : "Expected Row or decendent, got " + obj.getClass().getName(); return (Row)obj; } @Override public Object deserialize(Writable blob) throws SerDeException { assert blob instanceof Row : "Expected Row or decendent, got "+ blob.getClass().getName(); return blob; } @Override public ObjectInspector getObjectInspector() throws SerDeException { return null; } } public static class TFSOStatsPublisher implements StatsPublisher { static Map<String, String> stats; @Override public boolean init(StatsCollectionContext context) { return true; } @Override public boolean connect(StatsCollectionContext context) { return true; } @Override public boolean publishStat(String fileID, Map<String, String> stats) { this.stats = stats; return true; } @Override public boolean closeConnection(StatsCollectionContext context) { return true; } } public static class TFSOStatsAggregator implements StatsAggregator { @Override public boolean connect(StatsCollectionContext scc) { return true; } @Override public String aggregateStats(String keyPrefix, String statType) { return null; } @Override public boolean closeConnection(StatsCollectionContext scc) { return true; } } }
apache/flink-kubernetes-operator
35,595
flink-kubernetes-operator/src/test/java/org/apache/flink/kubernetes/operator/controller/FlinkSessionJobControllerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.kubernetes.operator.controller; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.configuration.Configuration; import org.apache.flink.kubernetes.operator.TestUtils; import org.apache.flink.kubernetes.operator.TestingFlinkService; import org.apache.flink.kubernetes.operator.api.FlinkDeployment; import org.apache.flink.kubernetes.operator.api.FlinkSessionJob; import org.apache.flink.kubernetes.operator.api.lifecycle.ResourceLifecycleState; import org.apache.flink.kubernetes.operator.api.spec.FlinkVersion; import org.apache.flink.kubernetes.operator.api.spec.JobState; import org.apache.flink.kubernetes.operator.api.spec.UpgradeMode; import org.apache.flink.kubernetes.operator.api.status.FlinkSessionJobReconciliationStatus; import org.apache.flink.kubernetes.operator.api.status.ReconciliationState; import org.apache.flink.kubernetes.operator.config.FlinkConfigManager; import org.apache.flink.kubernetes.operator.config.KubernetesOperatorConfigOptions; import org.apache.flink.kubernetes.operator.observer.JobStatusObserver; import org.apache.flink.kubernetes.operator.service.CheckpointHistoryWrapper; import 
org.apache.flink.kubernetes.operator.utils.EventRecorder; import org.apache.flink.runtime.client.JobStatusMessage; import org.apache.flink.util.SerializedThrowable; import io.fabric8.kubernetes.client.KubernetesClient; import io.fabric8.kubernetes.client.server.mock.EnableKubernetesMockClient; import io.javaoperatorsdk.operator.api.reconciler.Context; import io.javaoperatorsdk.operator.api.reconciler.UpdateControl; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.EnumSource; import java.util.ArrayList; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; import static org.apache.flink.api.common.JobStatus.CANCELLING; import static org.apache.flink.api.common.JobStatus.RECONCILING; import static org.apache.flink.api.common.JobStatus.RUNNING; import static org.apache.flink.kubernetes.operator.TestUtils.MAX_RECONCILE_TIMES; import static org.apache.flink.kubernetes.operator.config.KubernetesOperatorConfigOptions.SNAPSHOT_RESOURCE_ENABLED; import static org.apache.flink.kubernetes.operator.utils.EventRecorder.Reason.ValidationError; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; /** {@link FlinkSessionJobController} tests. 
*/ @EnableKubernetesMockClient(crud = true) class FlinkSessionJobControllerTest { private KubernetesClient kubernetesClient; private final FlinkConfigManager configManager = new FlinkConfigManager(new Configuration()); private Context context; private TestingFlinkService flinkService = new TestingFlinkService(); private TestingFlinkSessionJobController testController; private FlinkSessionJob sessionJob = TestUtils.buildSessionJob(); private FlinkSessionJob suspendedSessionJob = TestUtils.buildSessionJob(JobState.SUSPENDED); @BeforeEach public void before() { flinkService = new TestingFlinkService(); testController = new TestingFlinkSessionJobController(configManager, flinkService); sessionJob = TestUtils.buildSessionJob(); suspendedSessionJob = TestUtils.buildSessionJob(JobState.SUSPENDED); kubernetesClient.resource(sessionJob).createOrReplace(); context = TestUtils.createContextWithReadyFlinkDeployment(kubernetesClient); } @Test public void testSubmitJobButException() { flinkService.setDeployFailure(true); try { testController.reconcile(sessionJob, context); } catch (Exception e) { // Ignore } Assertions.assertEquals(2, testController.events().size()); // Discard submit event testController.events().remove(); var event = testController.events().remove(); Assertions.assertEquals(EventRecorder.Type.Warning.toString(), event.getType()); Assertions.assertEquals("Error", event.getReason()); testController.cleanup(sessionJob, context); } @Test public void verifyBasicReconcileLoop() throws Exception { UpdateControl<FlinkSessionJob> updateControl; assertEquals( ReconciliationState.UPGRADING, sessionJob.getStatus().getReconciliationStatus().getState()); assertNull(sessionJob.getStatus().getJobStatus().getState()); verifyNormalBasicReconcileLoop(sessionJob); // Send in invalid update sessionJob.getSpec().getJob().setParallelism(-1); updateControl = testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); 
assertEquals(6, testController.getInternalStatusUpdateCount()); assertFalse(updateControl.isPatchStatus()); FlinkSessionJobReconciliationStatus reconciliationStatus = sessionJob.getStatus().getReconciliationStatus(); assertTrue( sessionJob .getStatus() .getError() .contains("Job parallelism must be larger than 0")); assertNotNull(reconciliationStatus.deserializeLastReconciledSpec().getJob()); // Validate job status correct even with error var jobStatus = sessionJob.getStatus().getJobStatus(); JobStatusMessage expectedJobStatus = flinkService.listJobs().get(0).f1; assertEquals(expectedJobStatus.getJobId().toHexString(), jobStatus.getJobId()); assertEquals(expectedJobStatus.getJobName(), jobStatus.getJobName()); assertEquals(expectedJobStatus.getJobState(), jobStatus.getState()); // Validate last stable spec is still the old one assertEquals( sessionJob.getStatus().getReconciliationStatus().getLastReconciledSpec(), sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); } @Test public void verifyBasicReconcileLoopForSuspendedSessionJob() throws Exception { assertEquals( ReconciliationState.UPGRADING, suspendedSessionJob.getStatus().getReconciliationStatus().getState()); assertNull(suspendedSessionJob.getStatus().getJobStatus().getState()); int reconcileTimes = 0; while (reconcileTimes < MAX_RECONCILE_TIMES) { verifyReconcileInitialSuspendedDeployment(suspendedSessionJob); reconcileTimes++; } suspendedSessionJob.getSpec().getJob().setState(JobState.RUNNING); verifyNormalBasicReconcileLoop(suspendedSessionJob); } @Test public void verifyReconcileLoopForInitialSuspendedSessionJobWithSavepoint() throws Exception { assertEquals( ReconciliationState.UPGRADING, suspendedSessionJob.getStatus().getReconciliationStatus().getState()); assertNull(suspendedSessionJob.getStatus().getJobStatus().getState()); int reconcileTimes = 0; while (reconcileTimes < MAX_RECONCILE_TIMES) { verifyReconcileInitialSuspendedDeployment(suspendedSessionJob); reconcileTimes++; } 
suspendedSessionJob.getSpec().getJob().setState(JobState.RUNNING); suspendedSessionJob.getSpec().getJob().setUpgradeMode(UpgradeMode.SAVEPOINT); suspendedSessionJob.getSpec().getJob().setInitialSavepointPath("s0"); verifyNormalBasicReconcileLoop(suspendedSessionJob); var jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("s0", jobs.get(0).f0); } @Test public void verifyUpgradeFromSavepointLegacy() throws Exception { UpdateControl<FlinkDeployment> updateControl; sessionJob.getSpec().getJob().setUpgradeMode(UpgradeMode.SAVEPOINT); sessionJob.getSpec().getJob().setInitialSavepointPath("s0"); sessionJob.getSpec().getFlinkConfiguration().put(SNAPSHOT_RESOURCE_ENABLED.key(), "false"); testController.reconcile(sessionJob, context); var jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("s0", jobs.get(0).f0); assertEquals("s0", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); var previousJobs = new ArrayList<>(jobs); sessionJob.getSpec().getJob().setInitialSavepointPath("s1"); // Send in a no-op change testController.reconcile(sessionJob, context); assertEquals(previousJobs, new ArrayList<>(flinkService.listJobs())); assertEquals("s0", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); // Upgrade job sessionJob.getSpec().getJob().setParallelism(100); updateControl = testController.reconcile(sessionJob, context); assertEquals( "savepoint_0", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); assertEquals(0L, updateControl.getScheduleDelay().get()); assertEquals( JobState.SUSPENDED, sessionJob .getStatus() .getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); flinkService.clearJobsInTerminalState(); testController.reconcile(sessionJob, context); jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("savepoint_0", jobs.get(0).f0); testController.reconcile(sessionJob, context); assertEquals( "savepoint_0", 
sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); // Suspend job sessionJob.getSpec().getJob().setState(JobState.SUSPENDED); testController.reconcile(sessionJob, context); flinkService.clearJobsInTerminalState(); // Resume from last savepoint sessionJob.getSpec().getJob().setState(JobState.RUNNING); testController.reconcile(sessionJob, context); jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("savepoint_1", jobs.get(0).f0); assertEquals( "savepoint_1", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); testController.reconcile(sessionJob, context); testController.cleanup(sessionJob, context); flinkService.clearJobsInTerminalState(); jobs = flinkService.listJobs(); assertEquals(0, jobs.size()); } @Test public void verifyLastStateUpgrade() throws Exception { sessionJob.getSpec().getJob().setUpgradeMode(UpgradeMode.LAST_STATE); testController.reconcile(sessionJob, context); // Simulate completed checkpoints flinkService.setCheckpointInfo( Tuple2.of( Optional.of( new CheckpointHistoryWrapper.CompletedCheckpointInfo( 0, "cp1", System.currentTimeMillis())), Optional.empty())); // Trigger Update sessionJob.getSpec().setRestartNonce(3L); testController.reconcile(sessionJob, context); // Make sure we are cancelling assertEquals(CANCELLING, sessionJob.getStatus().getJobStatus().getState()); // Once cancelling completed make sure that last reconciled spec is correctly upgraded and // job was started from cp testController.reconcile(sessionJob, context); assertEquals("cp1", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); assertEquals( UpgradeMode.SAVEPOINT, sessionJob .getStatus() .getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getUpgradeMode()); assertEquals(RECONCILING, sessionJob.getStatus().getJobStatus().getState()); assertEquals( ReconciliationState.DEPLOYED, sessionJob.getStatus().getReconciliationStatus().getState()); flinkService.clearJobsInTerminalState(); var jobs = 
flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("cp1", jobs.get(0).f0); testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); // Suspend job flinkService.setCheckpointInfo( Tuple2.of( Optional.of( new CheckpointHistoryWrapper.CompletedCheckpointInfo( 0, "cp2", System.currentTimeMillis())), Optional.empty())); sessionJob.getSpec().getJob().setState(JobState.SUSPENDED); testController.reconcile(sessionJob, context); testController.reconcile(sessionJob, context); assertEquals("cp2", sessionJob.getStatus().getJobStatus().getUpgradeSavepointPath()); } @Test public void verifyLastStateUpgradeFailure() throws Exception { sessionJob.getSpec().getJob().setUpgradeMode(UpgradeMode.LAST_STATE); testController.reconcile(sessionJob, context); // Simulate completed checkpoints flinkService.setCheckpointInfo( Tuple2.of( Optional.of( new CheckpointHistoryWrapper.CompletedCheckpointInfo( 0, "cp1", System.currentTimeMillis())), Optional.empty())); // Trigger Update sessionJob.getSpec().setRestartNonce(3L); testController.events().clear(); testController.reconcile(sessionJob, context); // Make sure we are cancelling assertEquals(CANCELLING, sessionJob.getStatus().getJobStatus().getState()); testController.events().poll(); assertEquals( testController.events().poll().getReason(), EventRecorder.Reason.SpecChanged.name()); testController.events().clear(); // Remove all jobs to trigger not found error flinkService.clear(); testController.reconcile(sessionJob, context); assertEquals(JobStatusObserver.JOB_NOT_FOUND_ERR, sessionJob.getStatus().getError()); assertEquals(RECONCILING, sessionJob.getStatus().getJobStatus().getState()); assertEquals( ReconciliationState.DEPLOYED, sessionJob.getStatus().getReconciliationStatus().getState()); testController.events().clear(); testController.reconcile(sessionJob, context); assertEquals( testController.events().poll().getReason(), 
EventRecorder.Reason.Missing.name()); assertTrue(testController.events().isEmpty()); testController.reconcile(sessionJob, context); assertEquals( testController.events().poll().getReason(), EventRecorder.Reason.Missing.name()); assertTrue(testController.events().isEmpty()); // Deletion should still work var deleteControl = testController.cleanup(sessionJob, context); assertTrue(deleteControl.isRemoveFinalizer()); } @Test public void verifyStatelessUpgrade() throws Exception { UpdateControl<FlinkDeployment> updateControl; sessionJob.getSpec().getJob().setUpgradeMode(UpgradeMode.STATELESS); sessionJob.getSpec().getJob().setInitialSavepointPath("s0"); testController.reconcile(sessionJob, context); var jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertEquals("s0", jobs.get(0).f0); testController.reconcile(sessionJob, context); assertEquals(2, testController.events().size()); assertEquals( EventRecorder.Reason.Submit, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); // Upgrade job sessionJob.getSpec().getJob().setParallelism(100); updateControl = testController.reconcile(sessionJob, context); assertEquals(2, testController.events().size()); assertEquals( EventRecorder.Reason.SpecChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.Suspended, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( configManager.getOperatorConfiguration().getProgressCheckInterval().toMillis(), updateControl.getScheduleDelay().get()); assertEquals( JobState.RUNNING, sessionJob .getStatus() .getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); updateControl = testController.reconcile(sessionJob, context); flinkService.clearJobsInTerminalState(); assertEquals( Optional.of( 
configManager.getOperatorConfiguration().getReconcileInterval().toMillis()), updateControl.getScheduleDelay()); testController.reconcile(sessionJob, context); jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertNull(jobs.get(0).f0); assertEquals(3, testController.events().size()); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.Submit, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); // Suspend job sessionJob.getSpec().getJob().setState(JobState.SUSPENDED); testController.reconcile(sessionJob, context); assertEquals(2, testController.events().size()); assertEquals( EventRecorder.Reason.SpecChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.Suspended, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); testController.reconcile(sessionJob, context); testController.events().clear(); // Resume from empty state sessionJob.getSpec().getJob().setState(JobState.RUNNING); testController.reconcile(sessionJob, context); flinkService.clearJobsInTerminalState(); testController.reconcile(sessionJob, context); assertEquals(3, testController.events().size()); assertEquals( EventRecorder.Reason.SpecChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.Submit, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); jobs = flinkService.listJobs(); assertEquals(1, jobs.size()); assertNull(jobs.get(0).f0); // Inject validation error in the middle of the upgrade sessionJob.getSpec().setRestartNonce(123L); 
testController.reconcile(sessionJob, context); assertEquals(2, testController.events().size()); assertEquals( EventRecorder.Reason.SpecChanged, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); assertEquals( EventRecorder.Reason.Suspended, EventRecorder.Reason.valueOf(testController.events().poll().getReason())); sessionJob.getSpec().getJob().setParallelism(-1); testController.reconcile(sessionJob, context); flinkService.clearJobsInTerminalState(); assertEquals(3, testController.events().size()); testController.reconcile(sessionJob, context); var statusEvents = testController.events().stream() .filter(e -> !e.getReason().equals(ValidationError.name())) .collect(Collectors.toList()); assertEquals(3, statusEvents.size()); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(statusEvents.get(0).getReason())); assertEquals( EventRecorder.Reason.Submit, EventRecorder.Reason.valueOf(statusEvents.get(1).getReason())); assertEquals( EventRecorder.Reason.JobStatusChanged, EventRecorder.Reason.valueOf(statusEvents.get(2).getReason())); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); assertEquals( JobState.RUNNING, sessionJob .getStatus() .getReconciliationStatus() .deserializeLastReconciledSpec() .getJob() .getState()); } @Test public void verifyReconcileWithBadConfig() throws Exception { UpdateControl<FlinkDeployment> updateControl; // Override headers, and it should be saved in lastReconciledSpec once a successful // reconcile() finishes. 
sessionJob .getSpec() .getFlinkConfiguration() .put(KubernetesOperatorConfigOptions.JAR_ARTIFACT_HTTP_HEADER.key(), "changed"); updateControl = testController.reconcile(sessionJob, context); assertFalse(updateControl.isPatchStatus()); assertEquals(RECONCILING, sessionJob.getStatus().getJobStatus().getState()); // Check when the bad config is applied, observe() will change the cluster state correctly sessionJob.getSpec().getJob().setParallelism(-1); // Next reconcile will set error msg and observe with previous validated config updateControl = testController.reconcile(sessionJob, context); assertTrue( sessionJob .getStatus() .getError() .contains("Job parallelism must be larger than 0")); assertFalse(updateControl.isPatchStatus()); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); // Make sure we do validation before getting effective config in reconcile(). // Verify the saved headers in lastReconciledSpec is actually used in observe() by // utilizing listJobConsumer sessionJob .getSpec() .getFlinkConfiguration() .put(KubernetesOperatorConfigOptions.JAR_ARTIFACT_HTTP_HEADER.key(), "again"); flinkService.setListJobConsumer( (configuration) -> assertEquals( "changed", configuration.get( KubernetesOperatorConfigOptions.JAR_ARTIFACT_HTTP_HEADER))); testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); } @Test public void testSuccessfulObservationShouldClearErrors() throws Exception { sessionJob.getSpec().getJob().setParallelism(-1); testController.reconcile(sessionJob, context); assertNull(sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); // Failed Job deployment should set errors to the status assertTrue( sessionJob .getStatus() .getError() .contains("Job parallelism must be larger than 0")); assertNull(sessionJob.getStatus().getJobStatus().getState()); // Job deployment becomes ready and successful observation should clear the errors 
sessionJob.getSpec().getJob().setParallelism(1); testController.reconcile(sessionJob, context); assertNull(sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); assertNull(sessionJob.getStatus().getError()); assertEquals( sessionJob.getStatus().getReconciliationStatus().getLastReconciledSpec(), sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); } @Test public void testValidationError() throws Exception { UpdateControl<FlinkDeployment> updateControl; sessionJob.getSpec().getJob().setParallelism(-1); updateControl = testController.reconcile(sessionJob, context); assertEquals(1, testController.events().size()); assertNull(sessionJob.getStatus().getJobStatus().getState()); var event = testController.events().remove(); assertEquals("Warning", event.getType()); assertEquals("ValidationError", event.getReason()); assertTrue(event.getMessage().startsWith("Job parallelism ")); // Failed spec should not be rescheduled assertEquals(Optional.empty(), updateControl.getScheduleDelay()); } @Test public void testInitialSavepointOnError() throws Exception { sessionJob.getSpec().getJob().setInitialSavepointPath("msp"); flinkService.setDeployFailure(true); try { testController.reconcile(sessionJob, context); fail(); } catch (Exception expected) { } flinkService.setDeployFailure(false); testController.reconcile(sessionJob, context); assertEquals("msp", flinkService.listJobs().get(0).f0); } @Test public void testErrorOnReconcileWithChainedExceptions() throws Exception { sessionJob.getSpec().getJob().setInitialSavepointPath("msp"); flinkService.setMakeItFailWith( new RuntimeException( "Deployment Failure", new IllegalStateException( null, new SerializedThrowable(new Exception("actual failure reason"))))); try { testController.reconcile(sessionJob, context); fail(); } catch (Exception expected) { } assertEquals(2, 
testController.events().size()); var event = testController.events().remove(); assertEquals("Submit", event.getReason()); event = testController.events().remove(); assertEquals("Error", event.getReason()); assertEquals( "Deployment Failure -> IllegalStateException -> actual failure reason", event.getMessage()); } @Test public void verifyCanaryHandling() throws Exception { var canary = TestUtils.createCanaryJob(); kubernetesClient.resource(canary).create(); assertTrue(testController.reconcile(canary, context).isNoUpdate()); assertEquals(0, testController.getInternalStatusUpdateCount()); assertEquals(1, testController.getCanaryResourceManager().getNumberOfActiveCanaries()); testController.cleanup(canary, context); assertEquals(0, testController.getInternalStatusUpdateCount()); assertEquals(0, testController.getCanaryResourceManager().getNumberOfActiveCanaries()); } @ParameterizedTest @EnumSource(FlinkVersion.class) public void testUnsupportedVersions(FlinkVersion version) throws Exception { context = TestUtils.createContextWithReadyFlinkDeployment( Map.of(), kubernetesClient, version); var updateControl = testController.reconcile(TestUtils.buildSessionJob(), context); var lastEvent = testController.events().poll(); if (!version.isEqualOrNewer(FlinkVersion.v1_15)) { assertTrue(updateControl.getScheduleDelay().isEmpty()); assertEquals( EventRecorder.Reason.UnsupportedFlinkVersion.name(), lastEvent.getReason()); } else { assertTrue(updateControl.getScheduleDelay().isPresent()); assertEquals(EventRecorder.Reason.Submit.name(), lastEvent.getReason()); } } @Test public void testCancelJobNotFound() throws Exception { testController.reconcile(sessionJob, context); var deleteControl = testController.cleanup(sessionJob, context); assertEquals(CANCELLING, sessionJob.getStatus().getJobStatus().getState()); assertFalse(deleteControl.isRemoveFinalizer()); assertEquals( ResourceLifecycleState.DELETING, testController .getStatusUpdateCounter() .currentResource .getStatus() 
.getLifecycleState()); assertEquals(ResourceLifecycleState.DELETING, sessionJob.getStatus().getLifecycleState()); assertEquals( configManager.getOperatorConfiguration().getProgressCheckInterval().toMillis(), deleteControl.getScheduleDelay().get()); flinkService.clear(); flinkService.setFlinkJobNotFound(true); deleteControl = testController.cleanup(sessionJob, context); assertTrue(deleteControl.isRemoveFinalizer()); assertEquals( ResourceLifecycleState.DELETED, testController .getStatusUpdateCounter() .currentResource .getStatus() .getLifecycleState()); assertEquals(ResourceLifecycleState.DELETED, sessionJob.getStatus().getLifecycleState()); } private void verifyReconcileInitialSuspendedDeployment(FlinkSessionJob sessionJob) throws Exception { UpdateControl<FlinkDeployment> updateControl = testController.reconcile(suspendedSessionJob, context); // Reconciling assertEquals(JobState.SUSPENDED, suspendedSessionJob.getSpec().getJob().getState()); assertNull(suspendedSessionJob.getStatus().getJobStatus().getState()); assertEquals(1, testController.getInternalStatusUpdateCount()); assertFalse(updateControl.isPatchStatus()); assertEquals( Optional.of( configManager.getOperatorConfiguration().getReconcileInterval().toMillis()), updateControl.getScheduleDelay()); // Validate reconciliation status FlinkSessionJobReconciliationStatus reconciliationStatus = suspendedSessionJob.getStatus().getReconciliationStatus(); assertNull(suspendedSessionJob.getStatus().getError()); assertNull(reconciliationStatus.deserializeLastReconciledSpec()); assertNull(reconciliationStatus.getLastStableSpec()); } private void verifyNormalBasicReconcileLoop(FlinkSessionJob sessionJob) throws Exception { UpdateControl<FlinkDeployment> updateControl = testController.reconcile(sessionJob, context); // Reconciling assertEquals(RECONCILING, sessionJob.getStatus().getJobStatus().getState()); assertEquals(4, testController.getInternalStatusUpdateCount()); assertFalse(updateControl.isPatchStatus()); 
assertEquals( Optional.of( configManager.getOperatorConfiguration().getReconcileInterval().toMillis()), updateControl.getScheduleDelay()); // Validate reconciliation status FlinkSessionJobReconciliationStatus reconciliationStatus = sessionJob.getStatus().getReconciliationStatus(); assertNull(sessionJob.getStatus().getError()); assertEquals(sessionJob.getSpec(), reconciliationStatus.deserializeLastReconciledSpec()); assertNull(sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); // Running updateControl = testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); assertEquals(5, testController.getInternalStatusUpdateCount()); assertFalse(updateControl.isPatchStatus()); assertEquals( Optional.of( configManager.getOperatorConfiguration().getReconcileInterval().toMillis()), updateControl.getScheduleDelay()); // Stable loop updateControl = testController.reconcile(sessionJob, context); assertEquals(RUNNING, sessionJob.getStatus().getJobStatus().getState()); assertEquals(5, testController.getInternalStatusUpdateCount()); assertFalse(updateControl.isPatchStatus()); assertEquals( Optional.of( configManager.getOperatorConfiguration().getReconcileInterval().toMillis()), updateControl.getScheduleDelay()); // Validate job status var jobStatus = sessionJob.getStatus().getJobStatus(); JobStatusMessage expectedJobStatus = flinkService.listJobs().get(0).f1; assertEquals(expectedJobStatus.getJobId().toHexString(), jobStatus.getJobId()); assertEquals(expectedJobStatus.getJobName(), jobStatus.getJobName()); assertEquals(expectedJobStatus.getJobState(), jobStatus.getState()); assertEquals( sessionJob.getStatus().getReconciliationStatus().getLastReconciledSpec(), sessionJob.getStatus().getReconciliationStatus().getLastStableSpec()); } }
googleapis/google-cloud-java
35,303
java-meet/proto-google-cloud-meet-v2beta/src/main/java/com/google/apps/meet/v2beta/ListMembersResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/meet/v2beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.apps.meet.v2beta; /** * * * <pre> * Response of list members. * </pre> * * Protobuf type {@code google.apps.meet.v2beta.ListMembersResponse} */ public final class ListMembersResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.meet.v2beta.ListMembersResponse) ListMembersResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListMembersResponse.newBuilder() to construct. 
private ListMembersResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListMembersResponse() { members_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListMembersResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_ListMembersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_ListMembersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.ListMembersResponse.class, com.google.apps.meet.v2beta.ListMembersResponse.Builder.class); } public static final int MEMBERS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.apps.meet.v2beta.Member> members_; /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ @java.lang.Override public java.util.List<com.google.apps.meet.v2beta.Member> getMembersList() { return members_; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.apps.meet.v2beta.MemberOrBuilder> getMembersOrBuilderList() { return members_; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ @java.lang.Override public int getMembersCount() { return members_.size(); } /** * * * <pre> * The list of members for the current page. 
* </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ @java.lang.Override public com.google.apps.meet.v2beta.Member getMembers(int index) { return members_.get(index); } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ @java.lang.Override public com.google.apps.meet.v2beta.MemberOrBuilder getMembersOrBuilder(int index) { return members_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < members_.size(); i++) { output.writeMessage(1, members_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < members_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, members_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.meet.v2beta.ListMembersResponse)) { return super.equals(obj); } com.google.apps.meet.v2beta.ListMembersResponse other = (com.google.apps.meet.v2beta.ListMembersResponse) obj; if (!getMembersList().equals(other.getMembersList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getMembersCount() > 0) { hash = (37 * hash) + MEMBERS_FIELD_NUMBER; hash = (53 * hash) + getMembersList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.apps.meet.v2beta.ListMembersResponse parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.ListMembersResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ListMembersResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.ListMembersResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.apps.meet.v2beta.ListMembersResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response of list members. * </pre> * * Protobuf type {@code google.apps.meet.v2beta.ListMembersResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.meet.v2beta.ListMembersResponse) com.google.apps.meet.v2beta.ListMembersResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_ListMembersResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_ListMembersResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.ListMembersResponse.class, com.google.apps.meet.v2beta.ListMembersResponse.Builder.class); } // Construct using com.google.apps.meet.v2beta.ListMembersResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (membersBuilder_ == null) { members_ = java.util.Collections.emptyList(); } else { members_ = null; membersBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_ListMembersResponse_descriptor; } @java.lang.Override public com.google.apps.meet.v2beta.ListMembersResponse 
getDefaultInstanceForType() { return com.google.apps.meet.v2beta.ListMembersResponse.getDefaultInstance(); } @java.lang.Override public com.google.apps.meet.v2beta.ListMembersResponse build() { com.google.apps.meet.v2beta.ListMembersResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.meet.v2beta.ListMembersResponse buildPartial() { com.google.apps.meet.v2beta.ListMembersResponse result = new com.google.apps.meet.v2beta.ListMembersResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.apps.meet.v2beta.ListMembersResponse result) { if (membersBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { members_ = java.util.Collections.unmodifiableList(members_); bitField0_ = (bitField0_ & ~0x00000001); } result.members_ = members_; } else { result.members_ = membersBuilder_.build(); } } private void buildPartial0(com.google.apps.meet.v2beta.ListMembersResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder 
addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.meet.v2beta.ListMembersResponse) { return mergeFrom((com.google.apps.meet.v2beta.ListMembersResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.apps.meet.v2beta.ListMembersResponse other) { if (other == com.google.apps.meet.v2beta.ListMembersResponse.getDefaultInstance()) return this; if (membersBuilder_ == null) { if (!other.members_.isEmpty()) { if (members_.isEmpty()) { members_ = other.members_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureMembersIsMutable(); members_.addAll(other.members_); } onChanged(); } } else { if (!other.members_.isEmpty()) { if (membersBuilder_.isEmpty()) { membersBuilder_.dispose(); membersBuilder_ = null; members_ = other.members_; bitField0_ = (bitField0_ & ~0x00000001); membersBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMembersFieldBuilder() : null; } else { membersBuilder_.addAllMessages(other.members_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.apps.meet.v2beta.Member m = input.readMessage( com.google.apps.meet.v2beta.Member.parser(), extensionRegistry); if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.add(m); } else { membersBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.apps.meet.v2beta.Member> members_ = java.util.Collections.emptyList(); private void ensureMembersIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { members_ = new java.util.ArrayList<com.google.apps.meet.v2beta.Member>(members_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.apps.meet.v2beta.Member, com.google.apps.meet.v2beta.Member.Builder, com.google.apps.meet.v2beta.MemberOrBuilder> membersBuilder_; /** * * * <pre> * The list of members for the 
current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public java.util.List<com.google.apps.meet.v2beta.Member> getMembersList() { if (membersBuilder_ == null) { return java.util.Collections.unmodifiableList(members_); } else { return membersBuilder_.getMessageList(); } } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public int getMembersCount() { if (membersBuilder_ == null) { return members_.size(); } else { return membersBuilder_.getCount(); } } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public com.google.apps.meet.v2beta.Member getMembers(int index) { if (membersBuilder_ == null) { return members_.get(index); } else { return membersBuilder_.getMessage(index); } } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder setMembers(int index, com.google.apps.meet.v2beta.Member value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.set(index, value); onChanged(); } else { membersBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder setMembers( int index, com.google.apps.meet.v2beta.Member.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.set(index, builderForValue.build()); onChanged(); } else { membersBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of members for the current page. 
* </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder addMembers(com.google.apps.meet.v2beta.Member value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.add(value); onChanged(); } else { membersBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder addMembers(int index, com.google.apps.meet.v2beta.Member value) { if (membersBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMembersIsMutable(); members_.add(index, value); onChanged(); } else { membersBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder addMembers(com.google.apps.meet.v2beta.Member.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.add(builderForValue.build()); onChanged(); } else { membersBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder addMembers( int index, com.google.apps.meet.v2beta.Member.Builder builderForValue) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.add(index, builderForValue.build()); onChanged(); } else { membersBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder addAllMembers( java.lang.Iterable<? 
extends com.google.apps.meet.v2beta.Member> values) { if (membersBuilder_ == null) { ensureMembersIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, members_); onChanged(); } else { membersBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder clearMembers() { if (membersBuilder_ == null) { members_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { membersBuilder_.clear(); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public Builder removeMembers(int index) { if (membersBuilder_ == null) { ensureMembersIsMutable(); members_.remove(index); onChanged(); } else { membersBuilder_.remove(index); } return this; } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public com.google.apps.meet.v2beta.Member.Builder getMembersBuilder(int index) { return getMembersFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public com.google.apps.meet.v2beta.MemberOrBuilder getMembersOrBuilder(int index) { if (membersBuilder_ == null) { return members_.get(index); } else { return membersBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public java.util.List<? 
extends com.google.apps.meet.v2beta.MemberOrBuilder> getMembersOrBuilderList() { if (membersBuilder_ != null) { return membersBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(members_); } } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public com.google.apps.meet.v2beta.Member.Builder addMembersBuilder() { return getMembersFieldBuilder() .addBuilder(com.google.apps.meet.v2beta.Member.getDefaultInstance()); } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public com.google.apps.meet.v2beta.Member.Builder addMembersBuilder(int index) { return getMembersFieldBuilder() .addBuilder(index, com.google.apps.meet.v2beta.Member.getDefaultInstance()); } /** * * * <pre> * The list of members for the current page. * </pre> * * <code>repeated .google.apps.meet.v2beta.Member members = 1;</code> */ public java.util.List<com.google.apps.meet.v2beta.Member.Builder> getMembersBuilderList() { return getMembersFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.apps.meet.v2beta.Member, com.google.apps.meet.v2beta.Member.Builder, com.google.apps.meet.v2beta.MemberOrBuilder> getMembersFieldBuilder() { if (membersBuilder_ == null) { membersBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.apps.meet.v2beta.Member, com.google.apps.meet.v2beta.Member.Builder, com.google.apps.meet.v2beta.MemberOrBuilder>( members_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); members_ = null; } return membersBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to be circulated back for further list call if current list doesn't * include all the members. 
Unset if all members are returned. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.meet.v2beta.ListMembersResponse) } // @@protoc_insertion_point(class_scope:google.apps.meet.v2beta.ListMembersResponse) private static final com.google.apps.meet.v2beta.ListMembersResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.meet.v2beta.ListMembersResponse(); } public static com.google.apps.meet.v2beta.ListMembersResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListMembersResponse> PARSER = new com.google.protobuf.AbstractParser<ListMembersResponse>() { @java.lang.Override public ListMembersResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new 
com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListMembersResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListMembersResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.meet.v2beta.ListMembersResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,358
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/SearchNearestEntitiesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/feature_online_store_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * The request message for * [FeatureOnlineStoreService.SearchNearestEntities][google.cloud.aiplatform.v1.FeatureOnlineStoreService.SearchNearestEntities]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.SearchNearestEntitiesRequest} */ public final class SearchNearestEntitiesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) SearchNearestEntitiesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SearchNearestEntitiesRequest.newBuilder() to construct. 
private SearchNearestEntitiesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchNearestEntitiesRequest() { featureView_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchNearestEntitiesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_SearchNearestEntitiesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_SearchNearestEntitiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.class, com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.Builder.class); } private int bitField0_; public static final int FEATURE_VIEW_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object featureView_ = ""; /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The featureView. */ @java.lang.Override public java.lang.String getFeatureView() { java.lang.Object ref = featureView_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); featureView_ = s; return s; } } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for featureView. */ @java.lang.Override public com.google.protobuf.ByteString getFeatureViewBytes() { java.lang.Object ref = featureView_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); featureView_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int QUERY_FIELD_NUMBER = 2; private com.google.cloud.aiplatform.v1.NearestNeighborQuery query_; /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the query field is set. */ @java.lang.Override public boolean hasQuery() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The query. */ @java.lang.Override public com.google.cloud.aiplatform.v1.NearestNeighborQuery getQuery() { return query_ == null ? com.google.cloud.aiplatform.v1.NearestNeighborQuery.getDefaultInstance() : query_; } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1.NearestNeighborQueryOrBuilder getQueryOrBuilder() { return query_ == null ? 
com.google.cloud.aiplatform.v1.NearestNeighborQuery.getDefaultInstance() : query_; } public static final int RETURN_FULL_ENTITY_FIELD_NUMBER = 3; private boolean returnFullEntity_ = false; /** * * * <pre> * Optional. If set to true, the full entities (including all vector values * and metadata) of the nearest neighbors are returned; otherwise only entity * id of the nearest neighbors will be returned. Note that returning full * entities will significantly increase the latency and cost of the query. * </pre> * * <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The returnFullEntity. */ @java.lang.Override public boolean getReturnFullEntity() { return returnFullEntity_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, featureView_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getQuery()); } if (returnFullEntity_ != false) { output.writeBool(3, returnFullEntity_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(featureView_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, featureView_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getQuery()); } if (returnFullEntity_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, returnFullEntity_); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest other = (com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) obj; if (!getFeatureView().equals(other.getFeatureView())) return false; if (hasQuery() != other.hasQuery()) return false; if (hasQuery()) { if (!getQuery().equals(other.getQuery())) return false; } if (getReturnFullEntity() != other.getReturnFullEntity()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + FEATURE_VIEW_FIELD_NUMBER; hash = (53 * hash) + getFeatureView().hashCode(); if (hasQuery()) { hash = (37 * hash) + QUERY_FIELD_NUMBER; hash = (53 * hash) + getQuery().hashCode(); } hash = (37 * hash) + RETURN_FULL_ENTITY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getReturnFullEntity()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [FeatureOnlineStoreService.SearchNearestEntities][google.cloud.aiplatform.v1.FeatureOnlineStoreService.SearchNearestEntities]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.SearchNearestEntitiesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_SearchNearestEntitiesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_SearchNearestEntitiesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.class, com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getQueryFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; featureView_ = ""; query_ = null; if (queryBuilder_ != null) { queryBuilder_.dispose(); queryBuilder_ = null; } returnFullEntity_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.FeatureOnlineStoreServiceProto .internal_static_google_cloud_aiplatform_v1_SearchNearestEntitiesRequest_descriptor; } 
@java.lang.Override public com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest build() { com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest buildPartial() { com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest result = new com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.featureView_ = featureView_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.query_ = queryBuilder_ == null ? 
query_ : queryBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.returnFullEntity_ = returnFullEntity_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest other) { if (other == com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest.getDefaultInstance()) return this; if (!other.getFeatureView().isEmpty()) { featureView_ = other.featureView_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasQuery()) { mergeQuery(other.getQuery()); } if (other.getReturnFullEntity() != false) { setReturnFullEntity(other.getReturnFullEntity()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { 
return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { featureView_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getQueryFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { returnFullEntity_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object featureView_ = ""; /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The featureView. */ public java.lang.String getFeatureView() { java.lang.Object ref = featureView_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); featureView_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for featureView. */ public com.google.protobuf.ByteString getFeatureViewBytes() { java.lang.Object ref = featureView_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); featureView_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The featureView to set. * @return This builder for chaining. */ public Builder setFeatureView(java.lang.String value) { if (value == null) { throw new NullPointerException(); } featureView_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearFeatureView() { featureView_ = getDefaultInstance().getFeatureView(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. 
FeatureView resource format * `projects/{project}/locations/{location}/featureOnlineStores/{featureOnlineStore}/featureViews/{featureView}` * </pre> * * <code> * string feature_view = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for featureView to set. * @return This builder for chaining. */ public Builder setFeatureViewBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); featureView_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.aiplatform.v1.NearestNeighborQuery query_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.NearestNeighborQuery, com.google.cloud.aiplatform.v1.NearestNeighborQuery.Builder, com.google.cloud.aiplatform.v1.NearestNeighborQueryOrBuilder> queryBuilder_; /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the query field is set. */ public boolean hasQuery() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The query. */ public com.google.cloud.aiplatform.v1.NearestNeighborQuery getQuery() { if (queryBuilder_ == null) { return query_ == null ? com.google.cloud.aiplatform.v1.NearestNeighborQuery.getDefaultInstance() : query_; } else { return queryBuilder_.getMessage(); } } /** * * * <pre> * Required. The query. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQuery(com.google.cloud.aiplatform.v1.NearestNeighborQuery value) { if (queryBuilder_ == null) { if (value == null) { throw new NullPointerException(); } query_ = value; } else { queryBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setQuery( com.google.cloud.aiplatform.v1.NearestNeighborQuery.Builder builderForValue) { if (queryBuilder_ == null) { query_ = builderForValue.build(); } else { queryBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeQuery(com.google.cloud.aiplatform.v1.NearestNeighborQuery value) { if (queryBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && query_ != null && query_ != com.google.cloud.aiplatform.v1.NearestNeighborQuery.getDefaultInstance()) { getQueryBuilder().mergeFrom(value); } else { query_ = value; } } else { queryBuilder_.mergeFrom(value); } if (query_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearQuery() { bitField0_ = (bitField0_ & ~0x00000002); query_ = null; if (queryBuilder_ != null) { queryBuilder_.dispose(); queryBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The query. 
* </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.NearestNeighborQuery.Builder getQueryBuilder() { bitField0_ |= 0x00000002; onChanged(); return getQueryFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.aiplatform.v1.NearestNeighborQueryOrBuilder getQueryOrBuilder() { if (queryBuilder_ != null) { return queryBuilder_.getMessageOrBuilder(); } else { return query_ == null ? com.google.cloud.aiplatform.v1.NearestNeighborQuery.getDefaultInstance() : query_; } } /** * * * <pre> * Required. The query. * </pre> * * <code> * .google.cloud.aiplatform.v1.NearestNeighborQuery query = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.NearestNeighborQuery, com.google.cloud.aiplatform.v1.NearestNeighborQuery.Builder, com.google.cloud.aiplatform.v1.NearestNeighborQueryOrBuilder> getQueryFieldBuilder() { if (queryBuilder_ == null) { queryBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1.NearestNeighborQuery, com.google.cloud.aiplatform.v1.NearestNeighborQuery.Builder, com.google.cloud.aiplatform.v1.NearestNeighborQueryOrBuilder>( getQuery(), getParentForChildren(), isClean()); query_ = null; } return queryBuilder_; } private boolean returnFullEntity_; /** * * * <pre> * Optional. If set to true, the full entities (including all vector values * and metadata) of the nearest neighbors are returned; otherwise only entity * id of the nearest neighbors will be returned. Note that returning full * entities will significantly increase the latency and cost of the query. 
* </pre> * * <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The returnFullEntity. */ @java.lang.Override public boolean getReturnFullEntity() { return returnFullEntity_; } /** * * * <pre> * Optional. If set to true, the full entities (including all vector values * and metadata) of the nearest neighbors are returned; otherwise only entity * id of the nearest neighbors will be returned. Note that returning full * entities will significantly increase the latency and cost of the query. * </pre> * * <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The returnFullEntity to set. * @return This builder for chaining. */ public Builder setReturnFullEntity(boolean value) { returnFullEntity_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set to true, the full entities (including all vector values * and metadata) of the nearest neighbors are returned; otherwise only entity * id of the nearest neighbors will be returned. Note that returning full * entities will significantly increase the latency and cost of the query. * </pre> * * <code>bool return_full_entity = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearReturnFullEntity() { bitField0_ = (bitField0_ & ~0x00000004); returnFullEntity_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.SearchNearestEntitiesRequest) private static final com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest(); } public static com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchNearestEntitiesRequest> PARSER = new com.google.protobuf.AbstractParser<SearchNearestEntitiesRequest>() { @java.lang.Override public SearchNearestEntitiesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SearchNearestEntitiesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchNearestEntitiesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.SearchNearestEntitiesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,402
java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/SwitchoverClusterRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1beta; /** * * * <pre> * Message for switching over to a cluster * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.SwitchoverClusterRequest} */ public final class SwitchoverClusterRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.SwitchoverClusterRequest) SwitchoverClusterRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SwitchoverClusterRequest.newBuilder() to construct. 
private SwitchoverClusterRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SwitchoverClusterRequest() { name_ = ""; requestId_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SwitchoverClusterRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_SwitchoverClusterRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_SwitchoverClusterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.class, com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REQUEST_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. */ @java.lang.Override public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. 
* * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ @java.lang.Override public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest)) { return super.equals(obj); } com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest other = (com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest) obj; if (!getName().equals(other.getName())) return false; if (!getRequestId().equals(other.getRequestId())) return false; if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER; hash = (53 * hash) + getRequestId().hashCode(); hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for switching over to a cluster * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.SwitchoverClusterRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.SwitchoverClusterRequest) com.google.cloud.alloydb.v1beta.SwitchoverClusterRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_SwitchoverClusterRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_SwitchoverClusterRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.class, com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.Builder.class); } // Construct using com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; requestId_ = ""; validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_SwitchoverClusterRequest_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest getDefaultInstanceForType() { return com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest build() { com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest buildPartial() { com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest result = new com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.requestId_ = requestId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } 
@java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest) { return mergeFrom((com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest other) { if (other == com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRequestId().isEmpty()) { requestId_ = other.requestId_; bitField0_ |= 0x00000002; onChanged(); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { requestId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. 
For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The name of the resource. For the required format, see the * comment on the Cluster.name field * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object requestId_ = ""; /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The requestId. 
*/ public java.lang.String getRequestId() { java.lang.Object ref = requestId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); requestId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for requestId. */ public com.google.protobuf.ByteString getRequestIdBytes() { java.lang.Object ref = requestId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); requestId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. 
If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The requestId to set. * @return This builder for chaining. */ public Builder setRequestId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearRequestId() { requestId_ = getDefaultInstance().getRequestId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. An optional request ID to identify requests. 
Specify a unique * request ID so that if you must retry your request, the server ignores the * request if it has already been completed. The server guarantees that for at * least 60 minutes since the first request. * * For example, consider a situation where you make an initial request and * the request times out. If you make the request again with the same request * ID, the server can check if the original operation with the same request ID * was received, and if so, ignores the second request. This prevents * clients from accidentally creating duplicate commitments. * * The request ID must be a valid UUID with the exception that zero UUID is * not supported (00000000-0000-0000-0000-000000000000). * </pre> * * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for requestId to set. * @return This builder for chaining. */ public Builder setRequestIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); requestId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private boolean validateOnly_; /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. * @return This builder for chaining. 
*/ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. If set, performs request validation, for example, permission * checks and any other type of validation, but does not actually execute the * create request. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.SwitchoverClusterRequest) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.SwitchoverClusterRequest) private static final com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest(); } public static com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SwitchoverClusterRequest> PARSER = new com.google.protobuf.AbstractParser<SwitchoverClusterRequest>() { @java.lang.Override public SwitchoverClusterRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SwitchoverClusterRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SwitchoverClusterRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1beta.SwitchoverClusterRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
35,286
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/SearchBlurbsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: schema/google/showcase/v1beta1/messaging.proto // Protobuf Java Version: 3.25.8 package com.google.showcase.v1beta1; /** * * * <pre> * The request message for the google.showcase.v1beta1.Messaging&#92;SearchBlurbs * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.SearchBlurbsRequest} */ public final class SearchBlurbsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.SearchBlurbsRequest) SearchBlurbsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SearchBlurbsRequest.newBuilder() to construct. 
private SearchBlurbsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchBlurbsRequest() { query_ = ""; parent_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchBlurbsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.SearchBlurbsRequest.class, com.google.showcase.v1beta1.SearchBlurbsRequest.Builder.class); } public static final int QUERY_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object query_ = ""; /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The query. */ @java.lang.Override public java.lang.String getQuery() { java.lang.Object ref = query_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); query_ = s; return s; } } /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for query. 
*/ @java.lang.Override public com.google.protobuf.ByteString getQueryBytes() { java.lang.Object ref = query_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); query_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARENT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * The maximum number of blurbs return. Server may return fewer * blurbs than requested. If unspecified, server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, query_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, parent_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, query_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; 
return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.showcase.v1beta1.SearchBlurbsRequest)) { return super.equals(obj); } com.google.showcase.v1beta1.SearchBlurbsRequest other = (com.google.showcase.v1beta1.SearchBlurbsRequest) obj; if (!getQuery().equals(other.getQuery())) return false; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + QUERY_FIELD_NUMBER; hash = (53 * hash) + getQuery().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( 
com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsRequest 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.showcase.v1beta1.SearchBlurbsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for the google.showcase.v1beta1.Messaging&#92;SearchBlurbs * method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.SearchBlurbsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.SearchBlurbsRequest) com.google.showcase.v1beta1.SearchBlurbsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.SearchBlurbsRequest.class, com.google.showcase.v1beta1.SearchBlurbsRequest.Builder.class); } // Construct using 
com.google.showcase.v1beta1.SearchBlurbsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; query_ = ""; parent_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsRequest_descriptor; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsRequest getDefaultInstanceForType() { return com.google.showcase.v1beta1.SearchBlurbsRequest.getDefaultInstance(); } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsRequest build() { com.google.showcase.v1beta1.SearchBlurbsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsRequest buildPartial() { com.google.showcase.v1beta1.SearchBlurbsRequest result = new com.google.showcase.v1beta1.SearchBlurbsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.showcase.v1beta1.SearchBlurbsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.query_ = query_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder 
clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.showcase.v1beta1.SearchBlurbsRequest) { return mergeFrom((com.google.showcase.v1beta1.SearchBlurbsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.showcase.v1beta1.SearchBlurbsRequest other) { if (other == com.google.showcase.v1beta1.SearchBlurbsRequest.getDefaultInstance()) return this; if (!other.getQuery().isEmpty()) { query_ = other.query_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { query_ 
= input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object query_ = ""; /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The query. */ public java.lang.String getQuery() { java.lang.Object ref = query_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); query_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for query. */ public com.google.protobuf.ByteString getQueryBytes() { java.lang.Object ref = query_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); query_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The query used to search for blurbs containing to words of this string. 
* Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The query to set. * @return This builder for chaining. */ public Builder setQuery(java.lang.String value) { if (value == null) { throw new NullPointerException(); } query_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearQuery() { query_ = getDefaultInstance().getQuery(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The query used to search for blurbs containing to words of this string. * Only posts that contain an exact match of a queried word will be returned. * </pre> * * <code>string query = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for query to set. * @return This builder for chaining. */ public Builder setQueryBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); query_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The rooms or profiles to search. If unset, `SearchBlurbs` will search all * rooms and all profiles. * </pre> * * <code>string parent = 2 [(.google.api.resource_reference) = { ... }</code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of blurbs return. Server may return fewer * blurbs than requested. If unspecified, server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of blurbs return. Server may return fewer * blurbs than requested. If unspecified, server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The maximum number of blurbs return. Server may return fewer * blurbs than requested. If unspecified, server will pick an appropriate * default. * </pre> * * <code>int32 page_size = 3;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @return The pageToken. 
*/ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The value of * google.showcase.v1beta1.SearchBlurbsResponse.next_page_token * returned from the previous call to * `google.showcase.v1beta1.Messaging&#92;SearchBlurbs` method. * </pre> * * <code>string page_token = 4;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.SearchBlurbsRequest) } // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.SearchBlurbsRequest) private static final com.google.showcase.v1beta1.SearchBlurbsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.showcase.v1beta1.SearchBlurbsRequest(); } public static com.google.showcase.v1beta1.SearchBlurbsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchBlurbsRequest> PARSER = new com.google.protobuf.AbstractParser<SearchBlurbsRequest>() { @java.lang.Override public SearchBlurbsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchBlurbsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchBlurbsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kafka
35,343
metadata/src/test/java/org/apache/kafka/metadata/authorizer/StandardAuthorizerTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.metadata.authorizer; import org.apache.kafka.common.ClusterResource; import org.apache.kafka.common.Endpoint; import org.apache.kafka.common.Uuid; import org.apache.kafka.common.acl.AccessControlEntryFilter; import org.apache.kafka.common.acl.AclBinding; import org.apache.kafka.common.acl.AclBindingFilter; import org.apache.kafka.common.acl.AclOperation; import org.apache.kafka.common.acl.AclPermissionType; import org.apache.kafka.common.errors.AuthorizerNotReadyException; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.metrics.internals.PluginMetricsImpl; import org.apache.kafka.common.resource.PatternType; import org.apache.kafka.common.resource.ResourcePattern; import org.apache.kafka.common.resource.ResourcePatternFilter; import org.apache.kafka.common.resource.ResourceType; import org.apache.kafka.common.security.auth.KafkaPrincipal; import org.apache.kafka.common.security.auth.SecurityProtocol; import org.apache.kafka.server.authorizer.Action; import org.apache.kafka.server.authorizer.AuthorizableRequestContext; import org.apache.kafka.server.authorizer.AuthorizerServerInfo; import 
org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.ValueSource; import org.mockito.MockedStatic; import org.mockito.Mockito; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CompletionStage; import static org.apache.kafka.common.acl.AclOperation.ALL; import static org.apache.kafka.common.acl.AclOperation.ALTER; import static org.apache.kafka.common.acl.AclOperation.ALTER_CONFIGS; import static org.apache.kafka.common.acl.AclOperation.CREATE; import static org.apache.kafka.common.acl.AclOperation.DELETE; import static org.apache.kafka.common.acl.AclOperation.DESCRIBE; import static org.apache.kafka.common.acl.AclOperation.DESCRIBE_CONFIGS; import static org.apache.kafka.common.acl.AclOperation.READ; import static org.apache.kafka.common.acl.AclOperation.WRITE; import static org.apache.kafka.common.acl.AclPermissionType.ALLOW; import static org.apache.kafka.common.acl.AclPermissionType.DENY; import static org.apache.kafka.common.resource.PatternType.LITERAL; import static org.apache.kafka.common.resource.PatternType.PREFIXED; import static org.apache.kafka.common.resource.ResourceType.GROUP; import static org.apache.kafka.common.resource.ResourceType.TOPIC; import static org.apache.kafka.common.security.auth.KafkaPrincipal.USER_TYPE; import static org.apache.kafka.metadata.authorizer.StandardAuthorizer.ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG; import static org.apache.kafka.metadata.authorizer.StandardAuthorizer.SUPER_USERS_CONFIG; import static org.apache.kafka.metadata.authorizer.StandardAuthorizer.getConfiguredSuperUsers; import static org.apache.kafka.metadata.authorizer.StandardAuthorizer.getDefaultResult; import static 
org.apache.kafka.metadata.authorizer.StandardAuthorizerData.WILDCARD; import static org.apache.kafka.metadata.authorizer.StandardAuthorizerData.WILDCARD_PRINCIPAL; import static org.apache.kafka.metadata.authorizer.StandardAuthorizerData.findResult; import static org.apache.kafka.server.authorizer.AuthorizationResult.ALLOWED; import static org.apache.kafka.server.authorizer.AuthorizationResult.DENIED; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; @Timeout(value = 40) public class StandardAuthorizerTest { public static final Endpoint PLAINTEXT = new Endpoint("PLAINTEXT", SecurityProtocol.PLAINTEXT, "127.0.0.1", 9020); public static final Endpoint CONTROLLER = new Endpoint("CONTROLLER", SecurityProtocol.PLAINTEXT, "127.0.0.1", 9020); public record AuthorizerTestServerInfo(Collection<Endpoint> endpoints) implements AuthorizerServerInfo { public AuthorizerTestServerInfo { assertFalse(endpoints.isEmpty()); } @Override public ClusterResource clusterResource() { return new ClusterResource(Uuid.fromString("r7mqHQrxTNmzbKvCvWZzLQ").toString()); } @Override public int brokerId() { return 0; } @Override public Endpoint interBrokerEndpoint() { return endpoints.iterator().next(); } @Override public Collection<String> earlyStartListeners() { List<String> result = new ArrayList<>(); for (Endpoint endpoint : endpoints) { if (endpoint.listener().equals("CONTROLLER")) { result.add(endpoint.listener()); } } return result; } } private final Metrics metrics = new Metrics(); @Test public void testGetConfiguredSuperUsers() { assertEquals(Set.of(), getConfiguredSuperUsers(Map.of())); assertEquals(Set.of(), getConfiguredSuperUsers(Map.of(SUPER_USERS_CONFIG, " "))); assertEquals(Set.of("User:bob", "User:alice"), 
getConfiguredSuperUsers(Map.of(SUPER_USERS_CONFIG, "User:bob;User:alice "))); assertEquals(Set.of("User:bob", "User:alice"), getConfiguredSuperUsers(Map.of(SUPER_USERS_CONFIG, "; User:bob ; User:alice "))); assertEquals("expected a string in format principalType:principalName but got bob", assertThrows(IllegalArgumentException.class, () -> getConfiguredSuperUsers( Map.of(SUPER_USERS_CONFIG, "bob;:alice"))).getMessage()); } @Test public void testGetDefaultResult() { assertEquals(DENIED, getDefaultResult(Map.of())); assertEquals(ALLOWED, getDefaultResult(Map.of( ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "true"))); assertEquals(DENIED, getDefaultResult(Map.of( ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "false"))); } @Test public void testAllowEveryoneIfNoAclFoundConfigEnabled() throws Exception { Map<String, Object> configs = Map.of( SUPER_USERS_CONFIG, "User:alice;User:chris", ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "true"); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(configs); List<StandardAclWithId> acls = List.of( withId(new StandardAcl(TOPIC, "topic1", LITERAL, "User:Alice", WILDCARD, READ, ALLOW)) ); acls.forEach(acl -> authorizer.addAcl(acl.id(), acl.acl())); assertEquals(List.of(DENIED), authorizer.authorize( new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "Bob")) .build(), List.of(newAction(READ, TOPIC, "topic1")) )); assertEquals(List.of(ALLOWED), authorizer.authorize( new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "Bob")) .build(), List.of(newAction(READ, TOPIC, "topic2")) )); } @Test public void testAllowEveryoneIfNoAclFoundConfigDisabled() throws Exception { Map<String, Object> configs = Map.of( SUPER_USERS_CONFIG, "User:alice;User:chris", ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "false"); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(configs); List<StandardAclWithId> acls = List.of( withId(new StandardAcl(TOPIC, "topic1", 
LITERAL, "User:Alice", WILDCARD, READ, ALLOW)) ); acls.forEach(acl -> authorizer.addAcl(acl.id(), acl.acl())); assertEquals(List.of(DENIED), authorizer.authorize( new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "Bob")) .build(), List.of(newAction(READ, TOPIC, "topic1")) )); assertEquals(List.of(DENIED), authorizer.authorize( new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "Bob")) .build(), List.of(newAction(READ, TOPIC, "topic2")) )); } @Test public void testConfigure() { Map<String, Object> configs = Map.of( SUPER_USERS_CONFIG, "User:alice;User:chris", ALLOW_EVERYONE_IF_NO_ACL_IS_FOUND_CONFIG, "true"); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(configs); assertEquals(Set.of("User:alice", "User:chris"), authorizer.superUsers()); assertEquals(ALLOWED, authorizer.defaultResult()); } private static Action newAction(AclOperation aclOperation, ResourceType resourceType, String resourceName) { return new Action(aclOperation, new ResourcePattern(resourceType, resourceName, LITERAL), 1, false, false); } private StandardAuthorizer createAndInitializeStandardAuthorizer() { return createAndInitializeStandardAuthorizer(Map.of(SUPER_USERS_CONFIG, "User:superman")); } private StandardAuthorizer createAndInitializeStandardAuthorizer(Map<String, Object> configs) { StandardAuthorizer authorizer = new StandardAuthorizer(); authorizer.configure(configs); authorizer.withPluginMetrics(new PluginMetricsImpl(metrics, Map.of())); authorizer.start(new AuthorizerTestServerInfo(List.of(PLAINTEXT))); authorizer.completeInitialLoad(); return authorizer; } private static StandardAcl newFooAcl(AclOperation op, AclPermissionType permission) { return new StandardAcl( TOPIC, "foo_", PREFIXED, "User:bob", WILDCARD, op, permission); } private static StandardAclWithId withId(StandardAcl acl) { return new StandardAclWithId(new Uuid(acl.hashCode(), acl.hashCode()), acl); } @Test public void 
testFindResultImplication() throws Exception { // These permissions all imply DESCRIBE. for (AclOperation op : List.of(DESCRIBE, READ, WRITE, DELETE, ALTER)) { assertEquals(ALLOWED, findResult(newAction(DESCRIBE, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(op, ALLOW))); } // CREATE does not imply DESCRIBE assertNull(findResult(newAction(DESCRIBE, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(CREATE, ALLOW))); // Deny ACLs don't do "implication". for (AclOperation op : List.of(READ, WRITE, DELETE, ALTER)) { assertNull(findResult(newAction(DESCRIBE, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(op, DENY))); } // Exact match assertEquals(DENIED, findResult(newAction(DESCRIBE, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(DESCRIBE, DENY))); // These permissions all imply DESCRIBE_CONFIGS. for (AclOperation op : List.of(DESCRIBE_CONFIGS, ALTER_CONFIGS)) { assertEquals(ALLOWED, findResult(newAction(DESCRIBE_CONFIGS, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(op, ALLOW))); } // Deny ACLs don't do "implication". assertNull(findResult(newAction(DESCRIBE_CONFIGS, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(ALTER_CONFIGS, DENY))); // Exact match assertEquals(DENIED, findResult(newAction(ALTER_CONFIGS, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). 
setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(ALTER_CONFIGS, DENY))); } private static StandardAcl newBarAcl(AclOperation op, AclPermissionType permission) { return new StandardAcl( GROUP, "bar", LITERAL, WILDCARD_PRINCIPAL, WILDCARD, op, permission); } @Test public void testFindResultPrincipalMatching() throws Exception { assertEquals(ALLOWED, findResult(newAction(READ, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), newFooAcl(READ, ALLOW))); // Principal does not match. assertNull(findResult(newAction(READ, TOPIC, "foo_bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "alice")).build(), newFooAcl(READ, ALLOW))); // Wildcard principal matches anything. assertEquals(DENIED, findResult(newAction(READ, GROUP, "bar"), new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "alice")).build(), newBarAcl(READ, DENY))); } private static void assertContains(Iterable<AclBinding> iterable, StandardAcl... 
acls) { Iterator<AclBinding> iterator = iterable.iterator(); for (int i = 0; iterator.hasNext(); i++) { AclBinding acl = iterator.next(); assertTrue(i < acls.length, "Only expected " + i + " element(s)"); assertEquals(acls[i].toBinding(), acl, "Unexpected element " + i); } assertFalse(iterator.hasNext(), "Expected only " + acls.length + " element(s)"); } @Test public void testListAcls() { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAclWithId> fooAcls = List.of( withId(newFooAcl(READ, ALLOW)), withId(newFooAcl(WRITE, ALLOW))); List<StandardAclWithId> barAcls = List.of( withId(newBarAcl(DESCRIBE_CONFIGS, DENY)), withId(newBarAcl(ALTER_CONFIGS, DENY))); fooAcls.forEach(a -> authorizer.addAcl(a.id(), a.acl())); barAcls.forEach(a -> authorizer.addAcl(a.id(), a.acl())); assertContains(authorizer.acls(AclBindingFilter.ANY), fooAcls.get(0).acl(), fooAcls.get(1).acl(), barAcls.get(0).acl(), barAcls.get(1).acl()); authorizer.removeAcl(fooAcls.get(1).id()); assertContains(authorizer.acls(AclBindingFilter.ANY), fooAcls.get(0).acl(), barAcls.get(0).acl(), barAcls.get(1).acl()); assertContains(authorizer.acls(new AclBindingFilter(new ResourcePatternFilter( TOPIC, null, PatternType.ANY), AccessControlEntryFilter.ANY)), fooAcls.get(0).acl()); } @Test public void testSimpleAuthorizations() throws Exception { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAclWithId> fooAcls = List.of( withId(newFooAcl(READ, ALLOW)), withId(newFooAcl(WRITE, ALLOW))); List<StandardAclWithId> barAcls = List.of( withId(newBarAcl(DESCRIBE_CONFIGS, ALLOW)), withId(newBarAcl(ALTER_CONFIGS, ALLOW))); fooAcls.forEach(a -> authorizer.addAcl(a.id(), a.acl())); barAcls.forEach(a -> authorizer.addAcl(a.id(), a.acl())); assertEquals(List.of(ALLOWED), authorizer.authorize(new MockAuthorizableRequestContext.Builder(). 
setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), List.of(newAction(READ, TOPIC, "foo_")))); assertEquals(List.of(ALLOWED), authorizer.authorize(new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "fred")).build(), List.of(newAction(ALTER_CONFIGS, GROUP, "bar")))); } @Test public void testDenyPrecedenceWithOperationAll() throws Exception { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAcl> acls = List.of( new StandardAcl(TOPIC, "foo", LITERAL, "User:alice", "*", ALL, DENY), new StandardAcl(TOPIC, "foo", PREFIXED, "User:alice", "*", READ, ALLOW), new StandardAcl(TOPIC, "foo", LITERAL, "User:*", "*", ALL, DENY), new StandardAcl(TOPIC, "foo", PREFIXED, "User:*", "*", DESCRIBE, ALLOW) ); acls.forEach(acl -> { StandardAclWithId aclWithId = withId(acl); authorizer.addAcl(aclWithId.id(), aclWithId.acl()); }); assertEquals(List.of(DENIED, DENIED, DENIED, ALLOWED), authorizer.authorize( newRequestContext("alice"), List.of( newAction(WRITE, TOPIC, "foo"), newAction(READ, TOPIC, "foo"), newAction(DESCRIBE, TOPIC, "foo"), newAction(READ, TOPIC, "foobar")))); assertEquals(List.of(DENIED, DENIED, DENIED, ALLOWED, DENIED), authorizer.authorize( newRequestContext("bob"), List.of( newAction(DESCRIBE, TOPIC, "foo"), newAction(READ, TOPIC, "foo"), newAction(WRITE, TOPIC, "foo"), newAction(DESCRIBE, TOPIC, "foobaz"), newAction(READ, TOPIC, "foobaz")))); } @Test public void testTopicAclWithOperationAll() throws Exception { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAcl> acls = List.of( new StandardAcl(TOPIC, "foo", LITERAL, "User:*", "*", ALL, ALLOW), new StandardAcl(TOPIC, "bar", PREFIXED, "User:alice", "*", ALL, ALLOW), new StandardAcl(TOPIC, "baz", LITERAL, "User:bob", "*", ALL, ALLOW) ); acls.forEach(acl -> { StandardAclWithId aclWithId = withId(acl); authorizer.addAcl(aclWithId.id(), aclWithId.acl()); }); assertEquals(List.of(ALLOWED, ALLOWED, DENIED), 
authorizer.authorize( newRequestContext("alice"), List.of( newAction(WRITE, TOPIC, "foo"), newAction(DESCRIBE_CONFIGS, TOPIC, "bar"), newAction(DESCRIBE, TOPIC, "baz")))); assertEquals(List.of(ALLOWED, DENIED, ALLOWED), authorizer.authorize( newRequestContext("bob"), List.of( newAction(WRITE, TOPIC, "foo"), newAction(READ, TOPIC, "bar"), newAction(DESCRIBE, TOPIC, "baz")))); assertEquals(List.of(ALLOWED, DENIED, DENIED), authorizer.authorize( newRequestContext("malory"), List.of( newAction(DESCRIBE, TOPIC, "foo"), newAction(WRITE, TOPIC, "bar"), newAction(READ, TOPIC, "baz")))); } private AuthorizableRequestContext newRequestContext(String principal) throws Exception { return new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, principal)) .build(); } @Test public void testHostAddressAclValidation() throws Exception { InetAddress host1 = InetAddress.getByName("192.168.1.1"); InetAddress host2 = InetAddress.getByName("192.168.1.2"); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAcl> acls = List.of( new StandardAcl(TOPIC, "foo", LITERAL, "User:alice", host1.getHostAddress(), READ, DENY), new StandardAcl(TOPIC, "foo", LITERAL, "User:alice", "*", READ, ALLOW), new StandardAcl(TOPIC, "bar", LITERAL, "User:bob", host2.getHostAddress(), READ, ALLOW), new StandardAcl(TOPIC, "bar", LITERAL, "User:*", InetAddress.getLocalHost().getHostAddress(), DESCRIBE, ALLOW) ); acls.forEach(acl -> { StandardAclWithId aclWithId = withId(acl); authorizer.addAcl(aclWithId.id(), aclWithId.acl()); }); List<Action> actions = List.of( newAction(READ, TOPIC, "foo"), newAction(READ, TOPIC, "bar"), newAction(DESCRIBE, TOPIC, "bar") ); assertEquals(List.of(ALLOWED, DENIED, ALLOWED), authorizer.authorize( newRequestContext("alice", InetAddress.getLocalHost()), actions)); assertEquals(List.of(DENIED, DENIED, DENIED), authorizer.authorize( newRequestContext("alice", host1), actions)); assertEquals(List.of(ALLOWED, DENIED, 
DENIED), authorizer.authorize( newRequestContext("alice", host2), actions)); assertEquals(List.of(DENIED, DENIED, ALLOWED), authorizer.authorize( newRequestContext("bob", InetAddress.getLocalHost()), actions)); assertEquals(List.of(DENIED, DENIED, DENIED), authorizer.authorize( newRequestContext("bob", host1), actions)); assertEquals(List.of(DENIED, ALLOWED, ALLOWED), authorizer.authorize( newRequestContext("bob", host2), actions)); } private AuthorizableRequestContext newRequestContext(String principal, InetAddress clientAddress) throws Exception { return new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, principal)) .setClientAddress(clientAddress) .build(); } private static void addManyAcls(StandardAuthorizer authorizer) { List<StandardAcl> acls = List.of( new StandardAcl(TOPIC, "green2", LITERAL, "User:*", "*", READ, ALLOW), new StandardAcl(TOPIC, "green", PREFIXED, "User:bob", "*", READ, ALLOW), new StandardAcl(TOPIC, "betamax4", LITERAL, "User:bob", "*", READ, ALLOW), new StandardAcl(TOPIC, "betamax", LITERAL, "User:bob", "*", READ, ALLOW), new StandardAcl(TOPIC, "beta", PREFIXED, "User:*", "*", READ, ALLOW), new StandardAcl(TOPIC, "alpha", PREFIXED, "User:*", "*", READ, ALLOW), new StandardAcl(TOPIC, "alp", PREFIXED, "User:bob", "*", READ, DENY), new StandardAcl(GROUP, "*", LITERAL, "User:bob", "*", WRITE, ALLOW), new StandardAcl(GROUP, "wheel", LITERAL, "User:*", "*", WRITE, DENY) ); acls.forEach(acl -> { StandardAclWithId aclWithId = withId(acl); authorizer.addAcl(aclWithId.id(), aclWithId.acl()); }); } @Test public void testAuthorizationWithManyAcls() throws Exception { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); addManyAcls(authorizer); assertEquals(List.of(ALLOWED, DENIED), authorizer.authorize(new MockAuthorizableRequestContext.Builder(). 
setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), List.of(newAction(READ, TOPIC, "green1"), newAction(WRITE, GROUP, "wheel")))); assertEquals(List.of(DENIED, ALLOWED, DENIED), authorizer.authorize(new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), List.of(newAction(READ, TOPIC, "alpha"), newAction(WRITE, GROUP, "arbitrary"), newAction(READ, TOPIC, "ala")))); } @ParameterizedTest @ValueSource(booleans = {true, false}) public void testDenyAuditLogging(boolean logIfDenied) throws Exception { try (MockedStatic<LoggerFactory> mockedLoggerFactory = Mockito.mockStatic(LoggerFactory.class)) { Logger otherLog = Mockito.mock(Logger.class); Logger auditLog = Mockito.mock(Logger.class); mockedLoggerFactory .when(() -> LoggerFactory.getLogger("kafka.authorizer.logger")) .thenReturn(auditLog); mockedLoggerFactory .when(() -> LoggerFactory.getLogger(Mockito.any(Class.class))) .thenReturn(otherLog); Mockito.when(auditLog.isDebugEnabled()).thenReturn(true); Mockito.when(auditLog.isTraceEnabled()).thenReturn(true); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); addManyAcls(authorizer); ResourcePattern topicResource = new ResourcePattern(TOPIC, "alpha", LITERAL); Action action = new Action(READ, topicResource, 1, false, logIfDenied); MockAuthorizableRequestContext requestContext = new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")) .setClientAddress(InetAddress.getByName("127.0.0.1")) .build(); assertEquals(List.of(DENIED), authorizer.authorize(requestContext, List.of(action))); String expectedAuditLog = "Principal = User:bob is Denied operation = READ " + "from host = 127.0.0.1 on resource = Topic:LITERAL:alpha for request = Fetch " + "with resourceRefCount = 1 based on rule MatchingAcl(acl=StandardAcl[resourceType=TOPIC, " + "resourceName=alp, patternType=PREFIXED, principal=User:bob, host=*, operation=READ, " + "permissionType=DENY])"; if 
(logIfDenied) { Mockito.verify(auditLog).info(expectedAuditLog); } else { Mockito.verify(auditLog).trace(expectedAuditLog); } } } @ParameterizedTest @ValueSource(booleans = {true, false}) public void testAllowAuditLogging(boolean logIfAllowed) throws Exception { try (MockedStatic<LoggerFactory> mockedLoggerFactory = Mockito.mockStatic(LoggerFactory.class)) { Logger otherLog = Mockito.mock(Logger.class); Logger auditLog = Mockito.mock(Logger.class); mockedLoggerFactory .when(() -> LoggerFactory.getLogger("kafka.authorizer.logger")) .thenReturn(auditLog); mockedLoggerFactory .when(() -> LoggerFactory.getLogger(Mockito.any(Class.class))) .thenReturn(otherLog); Mockito.when(auditLog.isDebugEnabled()).thenReturn(true); Mockito.when(auditLog.isTraceEnabled()).thenReturn(true); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); addManyAcls(authorizer); ResourcePattern topicResource = new ResourcePattern(TOPIC, "green1", LITERAL); Action action = new Action(READ, topicResource, 1, logIfAllowed, false); MockAuthorizableRequestContext requestContext = new MockAuthorizableRequestContext.Builder() .setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")) .setClientAddress(InetAddress.getByName("127.0.0.1")) .build(); assertEquals(List.of(ALLOWED), authorizer.authorize(requestContext, List.of(action))); String expectedAuditLog = "Principal = User:bob is Allowed operation = READ " + "from host = 127.0.0.1 on resource = Topic:LITERAL:green1 for request = Fetch " + "with resourceRefCount = 1 based on rule MatchingAcl(acl=StandardAcl[resourceType=TOPIC, " + "resourceName=green, patternType=PREFIXED, principal=User:bob, host=*, operation=READ, " + "permissionType=ALLOW])"; if (logIfAllowed) { Mockito.verify(auditLog).debug(expectedAuditLog); } else { Mockito.verify(auditLog).trace(expectedAuditLog); } } } /** * Test that StandardAuthorizer#start returns a completed future for early start * listeners. 
*/ @Test public void testStartWithEarlyStartListeners() { StandardAuthorizer authorizer = new StandardAuthorizer(); authorizer.configure(Map.of(SUPER_USERS_CONFIG, "User:superman")); Map<Endpoint, ? extends CompletionStage<Void>> futures2 = authorizer. start(new AuthorizerTestServerInfo(List.of(PLAINTEXT, CONTROLLER))); assertEquals(Set.of(PLAINTEXT, CONTROLLER), futures2.keySet()); assertFalse(futures2.get(PLAINTEXT).toCompletableFuture().isDone()); assertTrue(futures2.get(CONTROLLER).toCompletableFuture().isDone()); } /** * Test attempts to authorize prior to completeInitialLoad. During this time, only * superusers can be authorized. Other users will get an AuthorizerNotReadyException * exception. Not even an authorization result, just an exception thrown for the whole * batch. */ @Test public void testAuthorizationPriorToCompleteInitialLoad() throws Exception { StandardAuthorizer authorizer = new StandardAuthorizer(); authorizer.configure(Map.of(SUPER_USERS_CONFIG, "User:superman")); authorizer.withPluginMetrics(new PluginMetricsImpl(new Metrics(), Map.of())); assertThrows(AuthorizerNotReadyException.class, () -> authorizer.authorize(new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "bob")).build(), List.of(newAction(READ, TOPIC, "green1"), newAction(READ, TOPIC, "green2")))); assertEquals(List.of(ALLOWED, ALLOWED), authorizer.authorize(new MockAuthorizableRequestContext.Builder(). setPrincipal(new KafkaPrincipal(USER_TYPE, "superman")).build(), List.of(newAction(READ, TOPIC, "green1"), newAction(WRITE, GROUP, "wheel")))); } @Test public void testCompleteInitialLoad() { StandardAuthorizer authorizer = new StandardAuthorizer(); authorizer.configure(Map.of(SUPER_USERS_CONFIG, "User:superman")); Map<Endpoint, ? extends CompletionStage<Void>> futures = authorizer. 
start(new AuthorizerTestServerInfo(Set.of(PLAINTEXT))); assertEquals(Set.of(PLAINTEXT), futures.keySet()); assertFalse(futures.get(PLAINTEXT).toCompletableFuture().isDone()); authorizer.completeInitialLoad(); assertTrue(futures.get(PLAINTEXT).toCompletableFuture().isDone()); assertFalse(futures.get(PLAINTEXT).toCompletableFuture().isCompletedExceptionally()); } @Test public void testCompleteInitialLoadWithException() { StandardAuthorizer authorizer = new StandardAuthorizer(); authorizer.configure(Map.of(SUPER_USERS_CONFIG, "User:superman")); Map<Endpoint, ? extends CompletionStage<Void>> futures = authorizer. start(new AuthorizerTestServerInfo(List.of(PLAINTEXT, CONTROLLER))); assertEquals(Set.of(PLAINTEXT, CONTROLLER), futures.keySet()); assertFalse(futures.get(PLAINTEXT).toCompletableFuture().isDone()); assertTrue(futures.get(CONTROLLER).toCompletableFuture().isDone()); authorizer.completeInitialLoad(new TimeoutException("timed out")); assertTrue(futures.get(PLAINTEXT).toCompletableFuture().isDone()); assertTrue(futures.get(PLAINTEXT).toCompletableFuture().isCompletedExceptionally()); assertTrue(futures.get(CONTROLLER).toCompletableFuture().isDone()); assertFalse(futures.get(CONTROLLER).toCompletableFuture().isCompletedExceptionally()); } @Test public void testPrefixAcls() throws Exception { StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); List<StandardAcl> acls = List.of( new StandardAcl(TOPIC, "fooa", PREFIXED, "User:alice", "*", ALL, ALLOW), new StandardAcl(TOPIC, "foobar", LITERAL, "User:bob", "*", ALL, ALLOW), new StandardAcl(TOPIC, "f", PREFIXED, "User:bob", "*", ALL, ALLOW) ); acls.forEach(acl -> { StandardAclWithId aclWithId = withId(acl); authorizer.addAcl(aclWithId.id(), aclWithId.acl()); }); assertEquals(List.of(ALLOWED, DENIED, ALLOWED), authorizer.authorize( newRequestContext("bob"), List.of( newAction(WRITE, TOPIC, "foobarr"), newAction(READ, TOPIC, "goobar"), newAction(READ, TOPIC, "fooa")))); assertEquals(List.of(ALLOWED, 
DENIED, DENIED), authorizer.authorize( newRequestContext("alice"), List.of( newAction(DESCRIBE, TOPIC, "fooa"), newAction(WRITE, TOPIC, "bar"), newAction(READ, TOPIC, "baz")))); } @Test public void testAuthorizerMetrics() throws Exception { // There's always 1 metrics by default, the metrics count assertEquals(1, metrics.metrics().size()); StandardAuthorizer authorizer = createAndInitializeStandardAuthorizer(); assertEquals(List.of(ALLOWED), authorizer.authorize( new MockAuthorizableRequestContext.Builder().setPrincipal(new KafkaPrincipal(USER_TYPE, "superman")).build(), List.of(newAction(READ, TOPIC, "green")))); // StandardAuthorizer has 4 metrics assertEquals(5, metrics.metrics().size()); } }
googleapis/google-cloud-java
35,334
java-assured-workloads/proto-google-cloud-assured-workloads-v1beta1/src/main/java/com/google/cloud/assuredworkloads/v1beta1/RestrictAllowedResourcesRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/assuredworkloads/v1beta1/assuredworkloads.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.assuredworkloads.v1beta1; /** * * * <pre> * Request for restricting list of available resources in Workload environment. * </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest} */ public final class RestrictAllowedResourcesRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) RestrictAllowedResourcesRequestOrBuilder { private static final long serialVersionUID = 0L; // Use RestrictAllowedResourcesRequest.newBuilder() to construct. 
private RestrictAllowedResourcesRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private RestrictAllowedResourcesRequest() { name_ = ""; restrictionType_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new RestrictAllowedResourcesRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_RestrictAllowedResourcesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_RestrictAllowedResourcesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.class, com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.Builder .class); } /** * * * <pre> * The type of restriction. * </pre> * * Protobuf enum {@code * google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType} */ public enum RestrictionType implements com.google.protobuf.ProtocolMessageEnum { /** * * * <pre> * Unknown restriction type. * </pre> * * <code>RESTRICTION_TYPE_UNSPECIFIED = 0;</code> */ RESTRICTION_TYPE_UNSPECIFIED(0), /** * * * <pre> * Allow the use all of all gcp products, irrespective of the compliance * posture. This effectively removes gcp.restrictServiceUsage OrgPolicy * on the AssuredWorkloads Folder. * </pre> * * <code>ALLOW_ALL_GCP_RESOURCES = 1;</code> */ ALLOW_ALL_GCP_RESOURCES(1), /** * * * <pre> * Based on Workload's compliance regime, allowed list changes. * See - https://cloud.google.com/assured-workloads/docs/supported-products * for the list of supported resources. 
* </pre> * * <code>ALLOW_COMPLIANT_RESOURCES = 2;</code> */ ALLOW_COMPLIANT_RESOURCES(2), UNRECOGNIZED(-1), ; /** * * * <pre> * Unknown restriction type. * </pre> * * <code>RESTRICTION_TYPE_UNSPECIFIED = 0;</code> */ public static final int RESTRICTION_TYPE_UNSPECIFIED_VALUE = 0; /** * * * <pre> * Allow the use all of all gcp products, irrespective of the compliance * posture. This effectively removes gcp.restrictServiceUsage OrgPolicy * on the AssuredWorkloads Folder. * </pre> * * <code>ALLOW_ALL_GCP_RESOURCES = 1;</code> */ public static final int ALLOW_ALL_GCP_RESOURCES_VALUE = 1; /** * * * <pre> * Based on Workload's compliance regime, allowed list changes. * See - https://cloud.google.com/assured-workloads/docs/supported-products * for the list of supported resources. * </pre> * * <code>ALLOW_COMPLIANT_RESOURCES = 2;</code> */ public static final int ALLOW_COMPLIANT_RESOURCES_VALUE = 2; public final int getNumber() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalArgumentException( "Can't get the number of an unknown enum value."); } return value; } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static RestrictionType valueOf(int value) { return forNumber(value); } /** * @param value The numeric wire value of the corresponding enum entry. * @return The enum associated with the given numeric wire value. 
*/ public static RestrictionType forNumber(int value) { switch (value) { case 0: return RESTRICTION_TYPE_UNSPECIFIED; case 1: return ALLOW_ALL_GCP_RESOURCES; case 2: return ALLOW_COMPLIANT_RESOURCES; default: return null; } } public static com.google.protobuf.Internal.EnumLiteMap<RestrictionType> internalGetValueMap() { return internalValueMap; } private static final com.google.protobuf.Internal.EnumLiteMap<RestrictionType> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<RestrictionType>() { public RestrictionType findValueByNumber(int number) { return RestrictionType.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { if (this == UNRECOGNIZED) { throw new java.lang.IllegalStateException( "Can't get the descriptor of an unrecognized enum value."); } return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { return getDescriptor(); } public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .getDescriptor() .getEnumTypes() .get(0); } private static final RestrictionType[] VALUES = values(); public static RestrictionType valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc) { if (desc.getType() != getDescriptor()) { throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type."); } if (desc.getIndex() == -1) { return UNRECOGNIZED; } return VALUES[desc.getIndex()]; } private final int value; private RestrictionType(int value) { this.value = value; } // @@protoc_insertion_point(enum_scope:google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType) } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. The resource name of the Workload. 
This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Workload. This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESTRICTION_TYPE_FIELD_NUMBER = 2; private int restrictionType_ = 0; /** * * * <pre> * Required. The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for restrictionType. */ @java.lang.Override public int getRestrictionTypeValue() { return restrictionType_; } /** * * * <pre> * Required. 
The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The restrictionType. */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType getRestrictionType() { com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType result = com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .RestrictionType.forNumber(restrictionType_); return result == null ? com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType .UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (restrictionType_ != com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType .RESTRICTION_TYPE_UNSPECIFIED .getNumber()) { output.writeEnum(2, restrictionType_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (restrictionType_ != com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType .RESTRICTION_TYPE_UNSPECIFIED .getNumber()) { size += 
com.google.protobuf.CodedOutputStream.computeEnumSize(2, restrictionType_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest)) { return super.equals(obj); } com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest other = (com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) obj; if (!getName().equals(other.getName())) return false; if (restrictionType_ != other.restrictionType_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + RESTRICTION_TYPE_FIELD_NUMBER; hash = (53 * hash) + restrictionType_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for restricting list of available resources in Workload environment. 
* </pre> * * Protobuf type {@code google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_RestrictAllowedResourcesRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_RestrictAllowedResourcesRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.class, com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.Builder .class); } // Construct using // com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; restrictionType_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.assuredworkloads.v1beta1.AssuredworkloadsProto .internal_static_google_cloud_assuredworkloads_v1beta1_RestrictAllowedResourcesRequest_descriptor; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest getDefaultInstanceForType() { return 
com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest build() { com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest buildPartial() { com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest result = new com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.restrictionType_ = restrictionType_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) { return mergeFrom( (com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest other) { if (other == com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.restrictionType_ != 0) { setRestrictionTypeValue(other.getRestrictionTypeValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { restrictionType_ = input.readEnum(); bitField0_ |= 0x00000002; break; } // case 16 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * Required. The resource name of the Workload. 
This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Workload. This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Workload. This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The name to set. * @return This builder for chaining. 
*/ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Workload. This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Workload. This is the workloads's * relative path in the API, formatted as * "organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}". * For example, * "organizations/123/locations/us-east1/workloads/assured-workload-1". * </pre> * * <code>string name = 1 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int restrictionType_ = 0; /** * * * <pre> * Required. The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The enum numeric value on the wire for restrictionType. */ @java.lang.Override public int getRestrictionTypeValue() { return restrictionType_; } /** * * * <pre> * Required. 
The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The enum numeric value on the wire for restrictionType to set. * @return This builder for chaining. */ public Builder setRestrictionTypeValue(int value) { restrictionType_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The restrictionType. */ @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType getRestrictionType() { com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType result = com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .RestrictionType.forNumber(restrictionType_); return result == null ? com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest .RestrictionType.UNRECOGNIZED : result; } /** * * * <pre> * Required. The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @param value The restrictionType to set. * @return This builder for chaining. 
*/ public Builder setRestrictionType( com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; restrictionType_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Required. The type of restriction for using gcp products in the Workload environment. * </pre> * * <code> * .google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest.RestrictionType restriction_type = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return This builder for chaining. */ public Builder clearRestrictionType() { bitField0_ = (bitField0_ & ~0x00000002); restrictionType_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) } // @@protoc_insertion_point(class_scope:google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest) private static final com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest(); } public static com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<RestrictAllowedResourcesRequest> PARSER = new com.google.protobuf.AbstractParser<RestrictAllowedResourcesRequest>() { @java.lang.Override public RestrictAllowedResourcesRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<RestrictAllowedResourcesRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<RestrictAllowedResourcesRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.assuredworkloads.v1beta1.RestrictAllowedResourcesRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/incubator-kie-drools
32,857
drools-drl/drools-drl-parser/src/main/java/org/drools/drl/parser/lang/dsl/DSLMapWalker.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ // $ANTLR 3.5 src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g 2014-02-20 15:51:32 package org.drools.drl.parser.lang.dsl; import java.util.Map; import java.util.HashMap; import org.antlr.runtime.*; import org.antlr.runtime.tree.*; import java.util.Stack; @SuppressWarnings("all") public class DSLMapWalker extends TreeParser { public static final String[] tokenNames = new String[] { "<invalid>", "<EOR>", "<DOWN>", "<UP>", "COLON", "COMMA", "DOT", "EOL", "EQUALS", "EscapeSequence", "IdentifierPart", "LEFT_CURLY", "LEFT_SQUARE", "LITERAL", "MISC", "RIGHT_CURLY", "RIGHT_SQUARE", "VT_ANY", "VT_CONDITION", "VT_CONSEQUENCE", "VT_DSL_GRAMMAR", "VT_ENTRY", "VT_ENTRY_KEY", "VT_ENTRY_VAL", "VT_KEYWORD", "VT_LITERAL", "VT_META", "VT_PATTERN", "VT_QUAL", "VT_SCOPE", "VT_SPACE", "VT_VAR_DEF", "VT_VAR_REF", "WS" }; public static final int EOF=-1; public static final int COLON=4; public static final int COMMA=5; public static final int DOT=6; public static final int EOL=7; public static final int EQUALS=8; public static final int EscapeSequence=9; public static final int IdentifierPart=10; public static final int LEFT_CURLY=11; public static final int LEFT_SQUARE=12; public static final int 
LITERAL=13; public static final int MISC=14; public static final int RIGHT_CURLY=15; public static final int RIGHT_SQUARE=16; public static final int VT_ANY=17; public static final int VT_CONDITION=18; public static final int VT_CONSEQUENCE=19; public static final int VT_DSL_GRAMMAR=20; public static final int VT_ENTRY=21; public static final int VT_ENTRY_KEY=22; public static final int VT_ENTRY_VAL=23; public static final int VT_KEYWORD=24; public static final int VT_LITERAL=25; public static final int VT_META=26; public static final int VT_PATTERN=27; public static final int VT_QUAL=28; public static final int VT_SCOPE=29; public static final int VT_SPACE=30; public static final int VT_VAR_DEF=31; public static final int VT_VAR_REF=32; public static final int WS=33; // delegates public TreeParser[] getDelegates() { return new TreeParser[] {}; } // delegators public DSLMapWalker(TreeNodeStream input) { this(input, new RecognizerSharedState()); } public DSLMapWalker(TreeNodeStream input, RecognizerSharedState state) { super(input, state); } @Override public String[] getTokenNames() { return DSLMapWalker.tokenNames; } @Override public String getGrammarFileName() { return "src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g"; } protected static class mapping_file_scope { DSLMapping retval; } protected Stack<mapping_file_scope> mapping_file_stack = new Stack<mapping_file_scope>(); // $ANTLR start "mapping_file" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:16:1: mapping_file returns [DSLMapping mapping] : ^( VT_DSL_GRAMMAR ( valid_entry )* ) ; public final DSLMapping mapping_file() throws RecognitionException { mapping_file_stack.push(new mapping_file_scope()); DSLMapping mapping = null; mapping_file_stack.peek().retval = new DefaultDSLMapping() ; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:23:5: ( ^( VT_DSL_GRAMMAR ( valid_entry )* ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:23:7: ^( 
VT_DSL_GRAMMAR ( valid_entry )* ) { match(input,VT_DSL_GRAMMAR,FOLLOW_VT_DSL_GRAMMAR_in_mapping_file63); if ( input.LA(1)==Token.DOWN ) { match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:23:24: ( valid_entry )* loop1: while (true) { int alt1=2; int LA1_0 = input.LA(1); if ( (LA1_0==VT_ENTRY) ) { alt1=1; } switch (alt1) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:23:24: valid_entry { pushFollow(FOLLOW_valid_entry_in_mapping_file65); valid_entry(); state._fsp--; } break; default : break loop1; } } match(input, Token.UP, null); } mapping = mapping_file_stack.peek().retval; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving mapping_file_stack.pop(); } return mapping; } // $ANTLR end "mapping_file" // $ANTLR start "valid_entry" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:29:1: valid_entry returns [DSLMappingEntry mappingEntry] : ent= entry ; public final DSLMappingEntry valid_entry() throws RecognitionException { DSLMappingEntry mappingEntry = null; DSLMappingEntry ent =null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:30:5: (ent= entry ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:30:7: ent= entry { pushFollow(FOLLOW_entry_in_valid_entry96); ent=entry(); state._fsp--; mappingEntry = ent; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } return mappingEntry; } // $ANTLR end "valid_entry" protected static class entry_scope { Map<String,Integer> variables; AntlrDSLMappingEntry retval; StringBuilder keybuffer; StringBuilder valuebuffer; StringBuilder sentenceKeyBuffer; StringBuilder sentenceValueBuffer; } protected Stack<entry_scope> entry_stack = new Stack<entry_scope>(); // $ANTLR start "entry" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:34:1: entry returns 
[DSLMappingEntry mappingEntry] : ^( VT_ENTRY scope_section ( meta_section )? key_section ( value_section )? ) ; public final DSLMappingEntry entry() throws RecognitionException { entry_stack.push(new entry_scope()); DSLMappingEntry mappingEntry = null; entry_stack.peek().retval = new AntlrDSLMappingEntry() ; entry_stack.peek().variables = new HashMap<String,Integer>(); entry_stack.peek().keybuffer = new StringBuilder(); entry_stack.peek().valuebuffer = new StringBuilder(); entry_stack.peek().sentenceKeyBuffer = new StringBuilder(); entry_stack.peek().sentenceValueBuffer = new StringBuilder(); try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:51:5: ( ^( VT_ENTRY scope_section ( meta_section )? key_section ( value_section )? ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:51:7: ^( VT_ENTRY scope_section ( meta_section )? key_section ( value_section )? ) { match(input,VT_ENTRY,FOLLOW_VT_ENTRY_in_entry130); match(input, Token.DOWN, null); pushFollow(FOLLOW_scope_section_in_entry132); scope_section(); state._fsp--; // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:51:32: ( meta_section )? int alt2=2; int LA2_0 = input.LA(1); if ( (LA2_0==VT_META) ) { alt2=1; } switch (alt2) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:51:32: meta_section { pushFollow(FOLLOW_meta_section_in_entry134); meta_section(); state._fsp--; } break; } pushFollow(FOLLOW_key_section_in_entry137); key_section(); state._fsp--; entry_stack.peek().retval.setVariables( entry_stack.peek().variables ); entry_stack.peek().retval.setMappingKey(entry_stack.peek().sentenceKeyBuffer.toString()); entry_stack.peek().retval.setKeyPattern(entry_stack.peek().keybuffer.toString()); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:56:9: ( value_section )? 
int alt3=2; int LA3_0 = input.LA(1); if ( (LA3_0==VT_ENTRY_VAL) ) { alt3=1; } switch (alt3) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:56:9: value_section { pushFollow(FOLLOW_value_section_in_entry161); value_section(); state._fsp--; } break; } match(input, Token.UP, null); entry_stack.peek().retval.setMappingValue(entry_stack.peek().sentenceValueBuffer.toString()); entry_stack.peek().retval.setValuePattern(entry_stack.peek().valuebuffer.toString()); mappingEntry = entry_stack.peek().retval; mapping_file_stack.peek().retval.addEntry(mappingEntry); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving entry_stack.pop(); } return mappingEntry; } // $ANTLR end "entry" // $ANTLR start "scope_section" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:66:1: scope_section : ^(thescope= VT_SCOPE ( condition_key )? ( consequence_key )? ( keyword_key )? ( any_key )? ) ; public final void scope_section() throws RecognitionException { CommonTree thescope=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:5: ( ^(thescope= VT_SCOPE ( condition_key )? ( consequence_key )? ( keyword_key )? ( any_key )? ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:7: ^(thescope= VT_SCOPE ( condition_key )? ( consequence_key )? ( keyword_key )? ( any_key )? ) { thescope=(CommonTree)match(input,VT_SCOPE,FOLLOW_VT_SCOPE_in_scope_section191); if ( input.LA(1)==Token.DOWN ) { match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:27: ( condition_key )? 
int alt4=2; int LA4_0 = input.LA(1); if ( (LA4_0==VT_CONDITION) ) { alt4=1; } switch (alt4) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:27: condition_key { pushFollow(FOLLOW_condition_key_in_scope_section193); condition_key(); state._fsp--; } break; } // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:42: ( consequence_key )? int alt5=2; int LA5_0 = input.LA(1); if ( (LA5_0==VT_CONSEQUENCE) ) { alt5=1; } switch (alt5) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:42: consequence_key { pushFollow(FOLLOW_consequence_key_in_scope_section196); consequence_key(); state._fsp--; } break; } // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:59: ( keyword_key )? int alt6=2; int LA6_0 = input.LA(1); if ( (LA6_0==VT_KEYWORD) ) { alt6=1; } switch (alt6) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:59: keyword_key { pushFollow(FOLLOW_keyword_key_in_scope_section199); keyword_key(); state._fsp--; } break; } // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:72: ( any_key )? int alt7=2; int LA7_0 = input.LA(1); if ( (LA7_0==VT_ANY) ) { alt7=1; } switch (alt7) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:67:72: any_key { pushFollow(FOLLOW_any_key_in_scope_section202); any_key(); state._fsp--; } break; } match(input, Token.UP, null); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "scope_section" // $ANTLR start "meta_section" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:72:1: meta_section : ^( VT_META (metalit= LITERAL )? ) ; public final void meta_section() throws RecognitionException { CommonTree metalit=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:73:5: ( ^( VT_META (metalit= LITERAL )? 
) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:73:7: ^( VT_META (metalit= LITERAL )? ) { match(input,VT_META,FOLLOW_VT_META_in_meta_section224); if ( input.LA(1)==Token.DOWN ) { match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:73:24: (metalit= LITERAL )? int alt8=2; int LA8_0 = input.LA(1); if ( (LA8_0==LITERAL) ) { alt8=1; } switch (alt8) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:73:24: metalit= LITERAL { metalit=(CommonTree)match(input,LITERAL,FOLLOW_LITERAL_in_meta_section228); } break; } match(input, Token.UP, null); } if ( metalit == null || (metalit!=null?metalit.getText():null) == null || (metalit!=null?metalit.getText():null).length() == 0 ) { entry_stack.peek().retval.setMetaData(DSLMappingEntry.EMPTY_METADATA); } else { entry_stack.peek().retval.setMetaData(new DSLMappingEntry.DefaultDSLEntryMetaData( (metalit!=null?metalit.getText():null) )); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "meta_section" // $ANTLR start "key_section" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:83:1: key_section : ^( VT_ENTRY_KEY ( key_sentence )+ ) ; public final void key_section() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:84:5: ( ^( VT_ENTRY_KEY ( key_sentence )+ ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:84:7: ^( VT_ENTRY_KEY ( key_sentence )+ ) { match(input,VT_ENTRY_KEY,FOLLOW_VT_ENTRY_KEY_in_key_section254); match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:84:22: ( key_sentence )+ int cnt9=0; loop9: while (true) { int alt9=2; int LA9_0 = input.LA(1); if ( (LA9_0==VT_LITERAL||(LA9_0 >= VT_SPACE && LA9_0 <= VT_VAR_DEF)) ) { alt9=1; } switch (alt9) { case 1 : // 
src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:84:22: key_sentence { pushFollow(FOLLOW_key_sentence_in_key_section256); key_sentence(); state._fsp--; } break; default : if ( cnt9 >= 1 ) break loop9; EarlyExitException eee = new EarlyExitException(9, input); throw eee; } cnt9++; } match(input, Token.UP, null); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "key_section" // $ANTLR start "key_sentence" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:87:1: key_sentence : ( variable_definition |vtl= VT_LITERAL | VT_SPACE ); public final void key_sentence() throws RecognitionException { CommonTree vtl=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:88:5: ( variable_definition |vtl= VT_LITERAL | VT_SPACE ) int alt10=3; switch ( input.LA(1) ) { case VT_VAR_DEF: { alt10=1; } break; case VT_LITERAL: { alt10=2; } break; case VT_SPACE: { alt10=3; } break; default: NoViableAltException nvae = new NoViableAltException("", 10, 0, input); throw nvae; } switch (alt10) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:88:7: variable_definition { pushFollow(FOLLOW_variable_definition_in_key_sentence277); variable_definition(); state._fsp--; } break; case 2 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:89:7: vtl= VT_LITERAL { vtl=(CommonTree)match(input,VT_LITERAL,FOLLOW_VT_LITERAL_in_key_sentence287); entry_stack.peek().keybuffer.append((vtl!=null?vtl.getText():null)); entry_stack.peek().sentenceKeyBuffer.append((vtl!=null?vtl.getText():null)); } break; case 3 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:94:7: VT_SPACE { match(input,VT_SPACE,FOLLOW_VT_SPACE_in_key_sentence301); entry_stack.peek().keybuffer.append("\\s+"); entry_stack.peek().sentenceKeyBuffer.append(" "); } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { 
// do for sure before leaving } } // $ANTLR end "key_sentence" // $ANTLR start "value_section" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:101:1: value_section : ^( VT_ENTRY_VAL ( value_sentence )+ ) ; public final void value_section() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:105:5: ( ^( VT_ENTRY_VAL ( value_sentence )+ ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:105:7: ^( VT_ENTRY_VAL ( value_sentence )+ ) { match(input,VT_ENTRY_VAL,FOLLOW_VT_ENTRY_VAL_in_value_section329); match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:105:22: ( value_sentence )+ int cnt11=0; loop11: while (true) { int alt11=2; int LA11_0 = input.LA(1); if ( (LA11_0==VT_LITERAL||LA11_0==VT_SPACE||LA11_0==VT_VAR_REF) ) { alt11=1; } switch (alt11) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:105:22: value_sentence { pushFollow(FOLLOW_value_sentence_in_value_section331); value_sentence(); state._fsp--; } break; default : if ( cnt11 >= 1 ) break loop11; EarlyExitException eee = new EarlyExitException(11, input); throw eee; } cnt11++; } match(input, Token.UP, null); } entry_stack.peek().valuebuffer.append(" "); } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "value_section" // $ANTLR start "value_sentence" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:108:1: value_sentence : ( variable_reference |vtl= VT_LITERAL | VT_SPACE ); public final void value_sentence() throws RecognitionException { CommonTree vtl=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:109:5: ( variable_reference |vtl= VT_LITERAL | VT_SPACE ) int alt12=3; switch ( input.LA(1) ) { case VT_VAR_REF: { alt12=1; } break; case VT_LITERAL: { alt12=2; } break; case VT_SPACE: { alt12=3; } break; default: NoViableAltException 
nvae = new NoViableAltException("", 12, 0, input); throw nvae; } switch (alt12) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:109:7: variable_reference { pushFollow(FOLLOW_variable_reference_in_value_sentence353); variable_reference(); state._fsp--; } break; case 2 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:110:7: vtl= VT_LITERAL { vtl=(CommonTree)match(input,VT_LITERAL,FOLLOW_VT_LITERAL_in_value_sentence363); entry_stack.peek().valuebuffer.append((vtl!=null?vtl.getText():null)); entry_stack.peek().sentenceValueBuffer.append((vtl!=null?vtl.getText():null)); } break; case 3 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:115:7: VT_SPACE { match(input,VT_SPACE,FOLLOW_VT_SPACE_in_value_sentence377); entry_stack.peek().valuebuffer.append(" "); entry_stack.peek().sentenceValueBuffer.append(" "); } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "value_sentence" // $ANTLR start "literal" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:122:1: literal : theliteral= VT_LITERAL ; public final void literal() throws RecognitionException { CommonTree theliteral=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:123:5: (theliteral= VT_LITERAL ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:123:7: theliteral= VT_LITERAL { theliteral=(CommonTree)match(input,VT_LITERAL,FOLLOW_VT_LITERAL_in_literal403); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "literal" // $ANTLR start "variable_definition" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:126:1: variable_definition : ^( VT_VAR_DEF varname= LITERAL ^( VT_QUAL (q= LITERAL )? ) (pattern= VT_PATTERN )? 
) ; public final void variable_definition() throws RecognitionException { CommonTree varname=null; CommonTree q=null; CommonTree pattern=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:5: ( ^( VT_VAR_DEF varname= LITERAL ^( VT_QUAL (q= LITERAL )? ) (pattern= VT_PATTERN )? ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:9: ^( VT_VAR_DEF varname= LITERAL ^( VT_QUAL (q= LITERAL )? ) (pattern= VT_PATTERN )? ) { match(input,VT_VAR_DEF,FOLLOW_VT_VAR_DEF_in_variable_definition423); match(input, Token.DOWN, null); varname=(CommonTree)match(input,LITERAL,FOLLOW_LITERAL_in_variable_definition427); match(input,VT_QUAL,FOLLOW_VT_QUAL_in_variable_definition430); if ( input.LA(1)==Token.DOWN ) { match(input, Token.DOWN, null); // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:49: (q= LITERAL )? int alt13=2; int LA13_0 = input.LA(1); if ( (LA13_0==LITERAL) ) { alt13=1; } switch (alt13) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:49: q= LITERAL { q=(CommonTree)match(input,LITERAL,FOLLOW_LITERAL_in_variable_definition434); } break; } match(input, Token.UP, null); } // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:67: (pattern= VT_PATTERN )? 
int alt14=2; int LA14_0 = input.LA(1); if ( (LA14_0==VT_PATTERN) ) { alt14=1; } switch (alt14) { case 1 : // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:127:67: pattern= VT_PATTERN { pattern=(CommonTree)match(input,VT_PATTERN,FOLLOW_VT_PATTERN_in_variable_definition440); } break; } match(input, Token.UP, null); entry_stack.peek().variables.put((varname!=null?varname.getText():null), Integer.valueOf(0)); if(q!=null && pattern!=null){ entry_stack.peek().sentenceKeyBuffer.append("{"+(varname!=null?varname.getText():null)+":"+(q!=null?q.getText():null)+":"+(pattern!=null?pattern.getText():null)+"}"); }else if(q==null && pattern!=null){ entry_stack.peek().sentenceKeyBuffer.append("{"+(varname!=null?varname.getText():null)+":"+(pattern!=null?pattern.getText():null)+"}"); }else{ entry_stack.peek().sentenceKeyBuffer.append("{"+(varname!=null?varname.getText():null)+"}"); } if(q == null || (!q.getText().equals("ENUM") && !q.getText().equals("CF") && !q.getText().equals("DATE") && !q.getText().equals("BOOLEAN"))){ entry_stack.peek().keybuffer.append(pattern != null? 
"(" + (pattern!=null?pattern.getText():null) + ")" : "(.*?)"); }else{ entry_stack.peek().keybuffer.append("(.*?)"); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "variable_definition" // $ANTLR start "variable_reference" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:149:1: variable_reference : ^(varref= VT_VAR_REF lit= LITERAL ) ; public final void variable_reference() throws RecognitionException { CommonTree varref=null; CommonTree lit=null; try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:150:5: ( ^(varref= VT_VAR_REF lit= LITERAL ) ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:150:7: ^(varref= VT_VAR_REF lit= LITERAL ) { varref=(CommonTree)match(input,VT_VAR_REF,FOLLOW_VT_VAR_REF_in_variable_reference471); match(input, Token.DOWN, null); lit=(CommonTree)match(input,LITERAL,FOLLOW_LITERAL_in_variable_reference475); match(input, Token.UP, null); entry_stack.peek().valuebuffer.append("{" + (lit!=null?lit.getText():null) + "}" ); entry_stack.peek().sentenceValueBuffer.append("{"+(lit!=null?lit.getText():null)+"}"); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "variable_reference" // $ANTLR start "condition_key" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:157:1: condition_key : VT_CONDITION ; public final void condition_key() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:158:5: ( VT_CONDITION ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:158:7: VT_CONDITION { match(input,VT_CONDITION,FOLLOW_VT_CONDITION_in_condition_key500); entry_stack.peek().retval.setSection(DSLMappingEntry.CONDITION); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end 
"condition_key" // $ANTLR start "consequence_key" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:162:1: consequence_key : VT_CONSEQUENCE ; public final void consequence_key() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:163:5: ( VT_CONSEQUENCE ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:163:7: VT_CONSEQUENCE { match(input,VT_CONSEQUENCE,FOLLOW_VT_CONSEQUENCE_in_consequence_key524); entry_stack.peek().retval.setSection(DSLMappingEntry.CONSEQUENCE); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "consequence_key" // $ANTLR start "keyword_key" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:167:1: keyword_key : VT_KEYWORD ; public final void keyword_key() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:168:5: ( VT_KEYWORD ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:168:7: VT_KEYWORD { match(input,VT_KEYWORD,FOLLOW_VT_KEYWORD_in_keyword_key548); entry_stack.peek().retval.setSection(DSLMappingEntry.KEYWORD); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "keyword_key" // $ANTLR start "any_key" // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:172:1: any_key : VT_ANY ; public final void any_key() throws RecognitionException { try { // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:173:5: ( VT_ANY ) // src/main/resources/org/drools/compiler/lang/dsl/DSLMapWalker.g:173:7: VT_ANY { match(input,VT_ANY,FOLLOW_VT_ANY_in_any_key572); entry_stack.peek().retval.setSection(DSLMappingEntry.ANY); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } } // $ANTLR end "any_key" // Delegated rules public static final BitSet 
FOLLOW_VT_DSL_GRAMMAR_in_mapping_file63 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_valid_entry_in_mapping_file65 = new BitSet(new long[]{0x0000000000200008L}); public static final BitSet FOLLOW_entry_in_valid_entry96 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_ENTRY_in_entry130 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_scope_section_in_entry132 = new BitSet(new long[]{0x0000000004400000L}); public static final BitSet FOLLOW_meta_section_in_entry134 = new BitSet(new long[]{0x0000000000400000L}); public static final BitSet FOLLOW_key_section_in_entry137 = new BitSet(new long[]{0x0000000000800008L}); public static final BitSet FOLLOW_value_section_in_entry161 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_SCOPE_in_scope_section191 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_condition_key_in_scope_section193 = new BitSet(new long[]{0x00000000010A0008L}); public static final BitSet FOLLOW_consequence_key_in_scope_section196 = new BitSet(new long[]{0x0000000001020008L}); public static final BitSet FOLLOW_keyword_key_in_scope_section199 = new BitSet(new long[]{0x0000000000020008L}); public static final BitSet FOLLOW_any_key_in_scope_section202 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_META_in_meta_section224 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_LITERAL_in_meta_section228 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_ENTRY_KEY_in_key_section254 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_key_sentence_in_key_section256 = new BitSet(new long[]{0x00000000C2000008L}); public static final BitSet FOLLOW_variable_definition_in_key_sentence277 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_VT_LITERAL_in_key_sentence287 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_SPACE_in_key_sentence301 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_ENTRY_VAL_in_value_section329 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_value_sentence_in_value_section331 = new BitSet(new long[]{0x0000000142000008L}); public static final BitSet FOLLOW_variable_reference_in_value_sentence353 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_LITERAL_in_value_sentence363 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_SPACE_in_value_sentence377 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_LITERAL_in_literal403 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_VAR_DEF_in_variable_definition423 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_LITERAL_in_variable_definition427 = new BitSet(new long[]{0x0000000010000000L}); public static final BitSet FOLLOW_VT_QUAL_in_variable_definition430 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_LITERAL_in_variable_definition434 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_PATTERN_in_variable_definition440 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_VAR_REF_in_variable_reference471 = new BitSet(new long[]{0x0000000000000004L}); public static final BitSet FOLLOW_LITERAL_in_variable_reference475 = new BitSet(new long[]{0x0000000000000008L}); public static final BitSet FOLLOW_VT_CONDITION_in_condition_key500 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_CONSEQUENCE_in_consequence_key524 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_KEYWORD_in_keyword_key548 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_VT_ANY_in_any_key572 = new BitSet(new long[]{0x0000000000000002L}); }
googleapis/google-cloud-java
35,626
java-network-security/google-cloud-network-security/src/main/java/com/google/cloud/networksecurity/v1/stub/GrpcOrganizationAddressGroupServiceStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.networksecurity.v1.stub; import static com.google.cloud.networksecurity.v1.OrganizationAddressGroupServiceClient.ListAddressGroupReferencesPagedResponse; import static com.google.cloud.networksecurity.v1.OrganizationAddressGroupServiceClient.ListAddressGroupsPagedResponse; import static com.google.cloud.networksecurity.v1.OrganizationAddressGroupServiceClient.ListLocationsPagedResponse; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.grpc.GrpcCallSettings; import com.google.api.gax.grpc.GrpcStubCallableFactory; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.cloud.networksecurity.v1.AddAddressGroupItemsRequest; import com.google.cloud.networksecurity.v1.AddressGroup; import com.google.cloud.networksecurity.v1.CloneAddressGroupItemsRequest; import com.google.cloud.networksecurity.v1.CreateAddressGroupRequest; import com.google.cloud.networksecurity.v1.DeleteAddressGroupRequest; import 
com.google.cloud.networksecurity.v1.GetAddressGroupRequest; import com.google.cloud.networksecurity.v1.ListAddressGroupReferencesRequest; import com.google.cloud.networksecurity.v1.ListAddressGroupReferencesResponse; import com.google.cloud.networksecurity.v1.ListAddressGroupsRequest; import com.google.cloud.networksecurity.v1.ListAddressGroupsResponse; import com.google.cloud.networksecurity.v1.OperationMetadata; import com.google.cloud.networksecurity.v1.RemoveAddressGroupItemsRequest; import com.google.cloud.networksecurity.v1.UpdateAddressGroupRequest; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.longrunning.Operation; import com.google.longrunning.stub.GrpcOperationsStub; import com.google.protobuf.Empty; import io.grpc.MethodDescriptor; import io.grpc.protobuf.ProtoUtils; import java.io.IOException; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * gRPC stub implementation for the OrganizationAddressGroupService service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/
// NOTE(review): the class Javadoc opens before this chunk; the "*/" above closes it.
// gRPC transport stub for OrganizationAddressGroupService. Generated code: each RPC
// is described by a static MethodDescriptor (fully-qualified method name + protobuf
// marshallers) and exposed through a callable field that the constructor wires to
// the matching settings object via the callable factory.
@Generated("by gapic-generator-java")
public class GrpcOrganizationAddressGroupServiceStub extends OrganizationAddressGroupServiceStub {

  // ---------------------------------------------------------------------------
  // Method descriptors: one per RPC. All calls are UNARY; request/response types
  // are marshalled with the protobuf default instances below.
  // ---------------------------------------------------------------------------

  private static final MethodDescriptor<ListAddressGroupsRequest, ListAddressGroupsResponse>
      listAddressGroupsMethodDescriptor =
          MethodDescriptor.<ListAddressGroupsRequest, ListAddressGroupsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/ListAddressGroups")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListAddressGroupsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListAddressGroupsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetAddressGroupRequest, AddressGroup>
      getAddressGroupMethodDescriptor =
          MethodDescriptor.<GetAddressGroupRequest, AddressGroup>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/GetAddressGroup")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetAddressGroupRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(AddressGroup.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // Mutating RPCs below return a long-running Operation; the typed result is
  // obtained through the corresponding OperationCallable fields further down.
  private static final MethodDescriptor<CreateAddressGroupRequest, Operation>
      createAddressGroupMethodDescriptor =
          MethodDescriptor.<CreateAddressGroupRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/CreateAddressGroup")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateAddressGroupRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<UpdateAddressGroupRequest, Operation>
      updateAddressGroupMethodDescriptor =
          MethodDescriptor.<UpdateAddressGroupRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/UpdateAddressGroup")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateAddressGroupRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<AddAddressGroupItemsRequest, Operation>
      addAddressGroupItemsMethodDescriptor =
          MethodDescriptor.<AddAddressGroupItemsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/AddAddressGroupItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(AddAddressGroupItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<RemoveAddressGroupItemsRequest, Operation>
      removeAddressGroupItemsMethodDescriptor =
          MethodDescriptor.<RemoveAddressGroupItemsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/RemoveAddressGroupItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(RemoveAddressGroupItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<CloneAddressGroupItemsRequest, Operation>
      cloneAddressGroupItemsMethodDescriptor =
          MethodDescriptor.<CloneAddressGroupItemsRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/CloneAddressGroupItems")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CloneAddressGroupItemsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DeleteAddressGroupRequest, Operation>
      deleteAddressGroupMethodDescriptor =
          MethodDescriptor.<DeleteAddressGroupRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/DeleteAddressGroup")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteAddressGroupRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<
          ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
      listAddressGroupReferencesMethodDescriptor =
          MethodDescriptor
              .<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.networksecurity.v1.OrganizationAddressGroupService/ListAddressGroupReferences")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListAddressGroupReferencesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListAddressGroupReferencesResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // Mixin services: Cloud Locations and IAM policy methods are routed to their
  // own service names rather than the address-group service.
  private static final MethodDescriptor<ListLocationsRequest, ListLocationsResponse>
      listLocationsMethodDescriptor =
          MethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/ListLocations")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLocationsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLocationsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetLocationRequest, Location>
      getLocationMethodDescriptor =
          MethodDescriptor.<GetLocationRequest, Location>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.location.Locations/GetLocation")
              .setRequestMarshaller(ProtoUtils.marshaller(GetLocationRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Location.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<SetIamPolicyRequest, Policy>
      setIamPolicyMethodDescriptor =
          MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
              .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetIamPolicyRequest, Policy>
      getIamPolicyMethodDescriptor =
          MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
              .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // ---------------------------------------------------------------------------
  // Callable fields, initialized once in the constructor. "Paged" variants wrap
  // list RPCs; "Operation" variants resolve long-running operations to their
  // typed result (AddressGroup, or Empty for delete).
  // ---------------------------------------------------------------------------

  private final UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsResponse>
      listAddressGroupsCallable;
  private final UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsPagedResponse>
      listAddressGroupsPagedCallable;
  private final UnaryCallable<GetAddressGroupRequest, AddressGroup> getAddressGroupCallable;
  private final UnaryCallable<CreateAddressGroupRequest, Operation> createAddressGroupCallable;
  private final OperationCallable<CreateAddressGroupRequest, AddressGroup, OperationMetadata>
      createAddressGroupOperationCallable;
  private final UnaryCallable<UpdateAddressGroupRequest, Operation> updateAddressGroupCallable;
  private final OperationCallable<UpdateAddressGroupRequest, AddressGroup, OperationMetadata>
      updateAddressGroupOperationCallable;
  private final UnaryCallable<AddAddressGroupItemsRequest, Operation> addAddressGroupItemsCallable;
  private final OperationCallable<AddAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      addAddressGroupItemsOperationCallable;
  private final UnaryCallable<RemoveAddressGroupItemsRequest, Operation>
      removeAddressGroupItemsCallable;
  private final OperationCallable<RemoveAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      removeAddressGroupItemsOperationCallable;
  private final UnaryCallable<CloneAddressGroupItemsRequest, Operation>
      cloneAddressGroupItemsCallable;
  private final OperationCallable<CloneAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      cloneAddressGroupItemsOperationCallable;
  private final UnaryCallable<DeleteAddressGroupRequest, Operation> deleteAddressGroupCallable;
  private final OperationCallable<DeleteAddressGroupRequest, Empty, OperationMetadata>
      deleteAddressGroupOperationCallable;
  private final UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
      listAddressGroupReferencesCallable;
  private final UnaryCallable<
          ListAddressGroupReferencesRequest, ListAddressGroupReferencesPagedResponse>
      listAddressGroupReferencesPagedCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable;
  private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable;
  private final UnaryCallable<GetLocationRequest, Location> getLocationCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;

  // Aggregates everything that must be released on close() (channels, executors).
  private final BackgroundResource backgroundResources;
  // Stub for polling long-running operations; shared by all OperationCallables.
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from settings, building a fresh ClientContext from them. */
  public static final GrpcOrganizationAddressGroupServiceStub create(
      OrganizationAddressGroupServiceStubSettings settings) throws IOException {
    return new GrpcOrganizationAddressGroupServiceStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings against the given client context. */
  public static final GrpcOrganizationAddressGroupServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcOrganizationAddressGroupServiceStub(
        OrganizationAddressGroupServiceStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, a given context, and a custom callable factory. */
  public static final GrpcOrganizationAddressGroupServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcOrganizationAddressGroupServiceStub(
        OrganizationAddressGroupServiceStubSettings.newBuilder().build(),
        clientContext,
        callableFactory);
  }

  /**
   * Constructs an instance of GrpcOrganizationAddressGroupServiceStub, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected GrpcOrganizationAddressGroupServiceStub(
      OrganizationAddressGroupServiceStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcOrganizationAddressGroupServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcOrganizationAddressGroupServiceStub, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected GrpcOrganizationAddressGroupServiceStub(
      OrganizationAddressGroupServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Transport settings: pair each method descriptor with a params extractor
    // that populates the request-routing header (e.g. "parent", "name") from
    // the request message, so requests reach the correct regional backend.
    GrpcCallSettings<ListAddressGroupsRequest, ListAddressGroupsResponse>
        listAddressGroupsTransportSettings =
            GrpcCallSettings.<ListAddressGroupsRequest, ListAddressGroupsResponse>newBuilder()
                .setMethodDescriptor(listAddressGroupsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<GetAddressGroupRequest, AddressGroup> getAddressGroupTransportSettings =
        GrpcCallSettings.<GetAddressGroupRequest, AddressGroup>newBuilder()
            .setMethodDescriptor(getAddressGroupMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateAddressGroupRequest, Operation> createAddressGroupTransportSettings =
        GrpcCallSettings.<CreateAddressGroupRequest, Operation>newBuilder()
            .setMethodDescriptor(createAddressGroupMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateAddressGroupRequest, Operation> updateAddressGroupTransportSettings =
        GrpcCallSettings.<UpdateAddressGroupRequest, Operation>newBuilder()
            .setMethodDescriptor(updateAddressGroupMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  // Update routes on the nested resource name, not a top-level field.
                  builder.add(
                      "address_group.name", String.valueOf(request.getAddressGroup().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<AddAddressGroupItemsRequest, Operation>
        addAddressGroupItemsTransportSettings =
            GrpcCallSettings.<AddAddressGroupItemsRequest, Operation>newBuilder()
                .setMethodDescriptor(addAddressGroupItemsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("address_group", String.valueOf(request.getAddressGroup()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<RemoveAddressGroupItemsRequest, Operation>
        removeAddressGroupItemsTransportSettings =
            GrpcCallSettings.<RemoveAddressGroupItemsRequest, Operation>newBuilder()
                .setMethodDescriptor(removeAddressGroupItemsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("address_group", String.valueOf(request.getAddressGroup()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<CloneAddressGroupItemsRequest, Operation>
        cloneAddressGroupItemsTransportSettings =
            GrpcCallSettings.<CloneAddressGroupItemsRequest, Operation>newBuilder()
                .setMethodDescriptor(cloneAddressGroupItemsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("address_group", String.valueOf(request.getAddressGroup()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<DeleteAddressGroupRequest, Operation> deleteAddressGroupTransportSettings =
        GrpcCallSettings.<DeleteAddressGroupRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteAddressGroupMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
        listAddressGroupReferencesTransportSettings =
            GrpcCallSettings
                .<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>newBuilder()
                .setMethodDescriptor(listAddressGroupReferencesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("address_group", String.valueOf(request.getAddressGroup()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings =
        GrpcCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder()
            .setMethodDescriptor(listLocationsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLocationRequest, Location> getLocationTransportSettings =
        GrpcCallSettings.<GetLocationRequest, Location>newBuilder()
            .setMethodDescriptor(getLocationMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();

    // Wire each callable to its transport settings and retry/timeout settings.
    this.listAddressGroupsCallable =
        callableFactory.createUnaryCallable(
            listAddressGroupsTransportSettings, settings.listAddressGroupsSettings(),
            clientContext);
    this.listAddressGroupsPagedCallable =
        callableFactory.createPagedCallable(
            listAddressGroupsTransportSettings, settings.listAddressGroupsSettings(),
            clientContext);
    this.getAddressGroupCallable =
        callableFactory.createUnaryCallable(
            getAddressGroupTransportSettings, settings.getAddressGroupSettings(), clientContext);
    this.createAddressGroupCallable =
        callableFactory.createUnaryCallable(
            createAddressGroupTransportSettings,
            settings.createAddressGroupSettings(),
            clientContext);
    this.createAddressGroupOperationCallable =
        callableFactory.createOperationCallable(
            createAddressGroupTransportSettings,
            settings.createAddressGroupOperationSettings(),
            clientContext,
            operationsStub);
    this.updateAddressGroupCallable =
        callableFactory.createUnaryCallable(
            updateAddressGroupTransportSettings,
            settings.updateAddressGroupSettings(),
            clientContext);
    this.updateAddressGroupOperationCallable =
        callableFactory.createOperationCallable(
            updateAddressGroupTransportSettings,
            settings.updateAddressGroupOperationSettings(),
            clientContext,
            operationsStub);
    this.addAddressGroupItemsCallable =
        callableFactory.createUnaryCallable(
            addAddressGroupItemsTransportSettings,
            settings.addAddressGroupItemsSettings(),
            clientContext);
    this.addAddressGroupItemsOperationCallable =
        callableFactory.createOperationCallable(
            addAddressGroupItemsTransportSettings,
            settings.addAddressGroupItemsOperationSettings(),
            clientContext,
            operationsStub);
    this.removeAddressGroupItemsCallable =
        callableFactory.createUnaryCallable(
            removeAddressGroupItemsTransportSettings,
            settings.removeAddressGroupItemsSettings(),
            clientContext);
    this.removeAddressGroupItemsOperationCallable =
        callableFactory.createOperationCallable(
            removeAddressGroupItemsTransportSettings,
            settings.removeAddressGroupItemsOperationSettings(),
            clientContext,
            operationsStub);
    this.cloneAddressGroupItemsCallable =
        callableFactory.createUnaryCallable(
            cloneAddressGroupItemsTransportSettings,
            settings.cloneAddressGroupItemsSettings(),
            clientContext);
    this.cloneAddressGroupItemsOperationCallable =
        callableFactory.createOperationCallable(
            cloneAddressGroupItemsTransportSettings,
            settings.cloneAddressGroupItemsOperationSettings(),
            clientContext,
            operationsStub);
    this.deleteAddressGroupCallable =
        callableFactory.createUnaryCallable(
            deleteAddressGroupTransportSettings,
            settings.deleteAddressGroupSettings(),
            clientContext);
    this.deleteAddressGroupOperationCallable =
        callableFactory.createOperationCallable(
            deleteAddressGroupTransportSettings,
            settings.deleteAddressGroupOperationSettings(),
            clientContext,
            operationsStub);
    this.listAddressGroupReferencesCallable =
        callableFactory.createUnaryCallable(
            listAddressGroupReferencesTransportSettings,
            settings.listAddressGroupReferencesSettings(),
            clientContext);
    this.listAddressGroupReferencesPagedCallable =
        callableFactory.createPagedCallable(
            listAddressGroupReferencesTransportSettings,
            settings.listAddressGroupReferencesSettings(),
            clientContext);
    this.listLocationsCallable =
        callableFactory.createUnaryCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.listLocationsPagedCallable =
        callableFactory.createPagedCallable(
            listLocationsTransportSettings, settings.listLocationsSettings(), clientContext);
    this.getLocationCallable =
        callableFactory.createUnaryCallable(
            getLocationTransportSettings, settings.getLocationSettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  /** Returns the operations stub used to poll long-running operations. */
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  // --- Accessors for the pre-built callables (see field comments above). ---

  @Override
  public UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsResponse>
      listAddressGroupsCallable() {
    return listAddressGroupsCallable;
  }

  @Override
  public UnaryCallable<ListAddressGroupsRequest, ListAddressGroupsPagedResponse>
      listAddressGroupsPagedCallable() {
    return listAddressGroupsPagedCallable;
  }

  @Override
  public UnaryCallable<GetAddressGroupRequest, AddressGroup> getAddressGroupCallable() {
    return getAddressGroupCallable;
  }

  @Override
  public UnaryCallable<CreateAddressGroupRequest, Operation> createAddressGroupCallable() {
    return createAddressGroupCallable;
  }

  @Override
  public OperationCallable<CreateAddressGroupRequest, AddressGroup, OperationMetadata>
      createAddressGroupOperationCallable() {
    return createAddressGroupOperationCallable;
  }

  @Override
  public UnaryCallable<UpdateAddressGroupRequest, Operation> updateAddressGroupCallable() {
    return updateAddressGroupCallable;
  }

  @Override
  public OperationCallable<UpdateAddressGroupRequest, AddressGroup, OperationMetadata>
      updateAddressGroupOperationCallable() {
    return updateAddressGroupOperationCallable;
  }

  @Override
  public UnaryCallable<AddAddressGroupItemsRequest, Operation> addAddressGroupItemsCallable() {
    return addAddressGroupItemsCallable;
  }

  @Override
  public OperationCallable<AddAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      addAddressGroupItemsOperationCallable() {
    return addAddressGroupItemsOperationCallable;
  }

  @Override
  public UnaryCallable<RemoveAddressGroupItemsRequest, Operation>
      removeAddressGroupItemsCallable() {
    return removeAddressGroupItemsCallable;
  }

  @Override
  public OperationCallable<RemoveAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      removeAddressGroupItemsOperationCallable() {
    return removeAddressGroupItemsOperationCallable;
  }

  @Override
  public UnaryCallable<CloneAddressGroupItemsRequest, Operation> cloneAddressGroupItemsCallable() {
    return cloneAddressGroupItemsCallable;
  }

  @Override
  public OperationCallable<CloneAddressGroupItemsRequest, AddressGroup, OperationMetadata>
      cloneAddressGroupItemsOperationCallable() {
    return cloneAddressGroupItemsOperationCallable;
  }

  @Override
  public UnaryCallable<DeleteAddressGroupRequest, Operation> deleteAddressGroupCallable() {
    return deleteAddressGroupCallable;
  }

  @Override
  public OperationCallable<DeleteAddressGroupRequest, Empty, OperationMetadata>
      deleteAddressGroupOperationCallable() {
    return deleteAddressGroupOperationCallable;
  }

  @Override
  public UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesResponse>
      listAddressGroupReferencesCallable() {
    return listAddressGroupReferencesCallable;
  }

  @Override
  public UnaryCallable<ListAddressGroupReferencesRequest, ListAddressGroupReferencesPagedResponse>
      listAddressGroupReferencesPagedCallable() {
    return listAddressGroupReferencesPagedCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() {
    return listLocationsCallable;
  }

  @Override
  public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse>
      listLocationsPagedCallable() {
    return listLocationsPagedCallable;
  }

  @Override
  public UnaryCallable<GetLocationRequest, Location> getLocationCallable() {
    return getLocationCallable;
  }

  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }

  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }

  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }

  // --- Lifecycle: all shutdown/termination queries delegate to the aggregated
  // background resources (channels, executors) created by the client context. ---

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Propagate unchecked exceptions as-is.
      throw e;
    } catch (Exception e) {
      // Wrap checked exceptions; close() itself declares none.
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
apache/qpid-broker-j
35,684
systests/protocol-tests-amqp-0-10/src/test/java/org/apache/qpid/tests/protocol/v0_10/QueueTest.java
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.qpid.tests.protocol.v0_10; import static java.nio.charset.StandardCharsets.UTF_8; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assumptions.assumeTrue; import org.junit.jupiter.api.Test; import org.apache.qpid.server.protocol.v0_10.transport.ExecutionErrorCode; import org.apache.qpid.server.protocol.v0_10.transport.ExecutionException; import org.apache.qpid.server.protocol.v0_10.transport.ExecutionResult; import org.apache.qpid.server.protocol.v0_10.transport.QueueQueryResult; import org.apache.qpid.server.protocol.v0_10.transport.SessionCommandPoint; import org.apache.qpid.server.protocol.v0_10.transport.SessionCompleted; import org.apache.qpid.server.protocol.v0_10.transport.SessionDetached; import org.apache.qpid.server.protocol.v0_10.transport.SessionFlush; import org.apache.qpid.tests.protocol.SpecificationTest; import org.apache.qpid.tests.utils.BrokerAdmin; import org.apache.qpid.tests.utils.BrokerAdminUsingTestBase; public class QueueTest extends BrokerAdminUsingTestBase { private 
static final byte[] SESSION_NAME = "test".getBytes(UTF_8); @Test @SpecificationTest(section = "10.queue.declare", description = "This command creates or checks a queue.") public void queueDeclare() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); SessionCompleted completed = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse() .getLatestResponse(SessionCompleted.class); assertThat(completed.getCommands().includes(0), is(equalTo(true))); assertThat(getBrokerAdmin().getQueueDepthMessages(BrokerAdmin.TEST_QUEUE_NAME), is(equalTo(0))); } } @Test @SpecificationTest(section = "10.queue.declare", description = "The alternate-exchange field specifies how messages on this queue should be treated when " + "they are rejected by a subscriber, or when they are orphaned by queue deletion.") public void queueDeclareWithAlternateExchange() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareAlternateExchange("amq.direct") .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.declare", description = "if the alternate-exchange does not match the name of any existing exchange on the server, " + "then an exception must be raised.") public void queueDeclareAlternateExchangeNotFound() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionException response = 
interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareAlternateExchange("unknownExchange") .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.declare", description = "The client MAY ask the server to assert that a queue exists without creating the queue if " + "not.") public void queueDeclarePassive() throws Exception { getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME); try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declarePassive(true) .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse() .getLatestResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.declare", description = "[...] 
If the queue does not exist, the server treats this as a failure.") public void queueDeclarePassiveQueueNotFound() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionException response = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declarePassive(true) .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.declare", description = "If set when creating a new queue, the queue will be marked as durable. Durable queues " + "remain active when a server restarts.") public void queueDeclareDurable() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareDurable(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } assumeTrue(getBrokerAdmin().supportsRestart()); getBrokerAdmin().restart(); try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declarePassive(true) .declareId(0) .declare() .session() .flushCompleted() .flush() .consumeResponse() .getLatestResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.declare", description = "If the server receives a declare, bind, consume or get request for a queue that 
has been" + "declared as exclusive by an existing client session, it MUST raise an exception.") public void queueDeclareAttemptedConsumeOfExclusivelyDeclaredQueue() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareExclusive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); try (FrameTransport transport2 = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction2 = transport2.newInteraction(); ExecutionException response = interaction2.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .message() .subscribeDestination("mysub") .subscribeQueue(BrokerAdmin.TEST_QUEUE_NAME) .subscribeId(0) .subscribe() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.RESOURCE_LOCKED))); } } try (FrameTransport transport2 = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction2 = transport2.newInteraction(); interaction2.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .message() .subscribeDestination("mysub") .subscribeQueue(BrokerAdmin.TEST_QUEUE_NAME) .subscribeId(0) .subscribe() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.declare", description = "If the server receives a declare, bind, consume or get request for a queue that has been" + "declared as exclusive by an existing client session, it MUST raise an exception.") public void queueDeclareRedeclareOfExclusivelyDeclaredQueue() throws Exception { try (FrameTransport transport = new 
FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareExclusive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); try (FrameTransport transport2 = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction2 = transport2.newInteraction(); ExecutionException response = interaction2.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareExclusive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.RESOURCE_LOCKED))); } } try (FrameTransport transport2 = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction2 = transport2.newInteraction(); interaction2.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareExclusive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.declare", description = "If this field [auto-delete] is set and the exclusive field is also set, then the queue " + "MUST be deleted when the session closes.") public void queueDeclareAutoDeleteAndExclusiveDeletedBySessionDetach() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareExclusive(true) .declareAutoDelete(true) 
.declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class) .session() .detachName(SESSION_NAME) .detach() .consumeResponse(SessionDetached.class); ExecutionException response = interaction.channelId(2) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declarePassive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.declare", description = "If this field is set and the exclusive field is not set the queue is deleted when all the " + "consumers have finished using it. Last consumer can be cancelled either explicitly or " + "because its session is closed.") public void queueDeclareAutoDeleteDeletedByLastConsumerCancelled() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declareAutoDelete(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } try (FrameTransport transport2 = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction2 = transport2.newInteraction(); String subscriberName = "mysub"; interaction2.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(0) .declarePassive(true) .declare() .message() .subscribeDestination(subscriberName) .subscribeQueue(BrokerAdmin.TEST_QUEUE_NAME) .subscribeId(1) .subscribe() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class) .message() .cancelId(2) .cancelDestination(subscriberName) .cancel() 
.session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); ExecutionException response = interaction2.queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declareId(3) .declarePassive(true) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.delete", description = "This command deletes a queue.") public void queueDelete() throws Exception { getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME); try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .deleteQueue(BrokerAdmin.TEST_QUEUE_NAME) .deleteId(0) .delete() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); ExecutionException response = interaction.queue() .declareQueue(BrokerAdmin.TEST_QUEUE_NAME) .declarePassive(true) .declareId(1) .declare() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.delete", description = "The queue must exist. 
If the client attempts to delete a non-existing queue the server " + "MUST raise an exception.") public void queueDeleteQueueNotFound() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionException response = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .deleteQueue(BrokerAdmin.TEST_QUEUE_NAME) .deleteId(0) .delete() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.delete", description = "If set, the server will only delete the queue if it has no consumers. If the queue has " + "consumers the server does does not delete it but raises an exception instead.") public void queueDeleteQueueDeleteWithConsumer() throws Exception { getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME); try (FrameTransport consumerTransport = new FrameTransport(getBrokerAdmin()).connect(); FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction consumerInteraction = consumerTransport.newInteraction(); String subscriberName = "mysub"; consumerInteraction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .message() .subscribeDestination(subscriberName) .subscribeQueue(BrokerAdmin.TEST_QUEUE_NAME) .subscribeId(1) .subscribe() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); final Interaction interaction = transport.newInteraction(); ExecutionException response = interaction.negotiateOpen() .channelId(1) .attachSession("test2".getBytes(UTF_8)) .queue() .deleteQueue(BrokerAdmin.TEST_QUEUE_NAME) .deleteId(0) .deleteIfUnused(true) .delete() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() 
.getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.PRECONDITION_FAILED))); consumerInteraction.message() .cancelId(2) .cancelDestination(subscriberName) .cancel() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); consumerInteraction.queue() .deleteQueue(BrokerAdmin.TEST_QUEUE_NAME) .deleteId(0) .deleteIfUnused(true) .delete() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class); } } @Test @SpecificationTest(section = "10.queue.purge", description = "This command removes all messages from a queue.") public void queuePurge() throws Exception { getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME); getBrokerAdmin().putMessageOnQueue(BrokerAdmin.TEST_QUEUE_NAME, "message"); try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionResult result = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .queryQueue(BrokerAdmin.TEST_QUEUE_NAME) .queryId(1) .query() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse().getLatestResponse(ExecutionResult.class); assertThat(((QueueQueryResult) result.getValue()).getMessageCount(), is(1L)); interaction.queue() .purgeQueue(BrokerAdmin.TEST_QUEUE_NAME) .purgeId(0) .purge() .session() .flushCompleted() .flush() .consumeResponse(SessionFlush.class) .consumeResponse(SessionCompleted.class); result = interaction.queue() .queryQueue(BrokerAdmin.TEST_QUEUE_NAME) .queryId(1) .query() .session() .flushCompleted() .flush() .consumeResponse(SessionCompleted.class) .consumeResponse().getLatestResponse(ExecutionResult.class); assertThat(((QueueQueryResult) result.getValue()).getMessageCount(), is(0L)); } } @Test @SpecificationTest(section = "10.queue.purge", description = "The queue must exist. 
If the client attempts to purge a non-existing queue the server " + "MUST raise an exception.") public void queuePurgeQueueNotFound() throws Exception { try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionException response = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .purgeQueue(BrokerAdmin.TEST_QUEUE_NAME) .purgeId(0) .purge() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse() .getLatestResponse(ExecutionException.class); assertThat(response.getErrorCode(), is(equalTo(ExecutionErrorCode.NOT_FOUND))); } } @Test @SpecificationTest(section = "10.queue.query", description = "This command requests information about a queue.") public void queueQuery() throws Exception { getBrokerAdmin().createQueue(BrokerAdmin.TEST_QUEUE_NAME); getBrokerAdmin().putMessageOnQueue(BrokerAdmin.TEST_QUEUE_NAME, "message"); try (FrameTransport transport = new FrameTransport(getBrokerAdmin()).connect()) { final Interaction interaction = transport.newInteraction(); ExecutionResult result = interaction.negotiateOpen() .channelId(1) .attachSession(SESSION_NAME) .queue() .queryQueue(BrokerAdmin.TEST_QUEUE_NAME) .queryId(0) .query() .session() .flushCompleted() .flush() .consumeResponse(SessionCommandPoint.class) .consumeResponse().getLatestResponse(ExecutionResult.class); QueueQueryResult queryResult = (QueueQueryResult) result.getValue(); assertThat(queryResult.getQueue(), is(equalTo(BrokerAdmin.TEST_QUEUE_NAME))); assertThat(queryResult.getAlternateExchange(), is(nullValue())); assertThat(queryResult.getMessageCount(), is(1L)); } } }
apache/hadoop-mapreduce
35,116
src/test/mapred/org/apache/hadoop/hdfs/NNBench.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import java.io.IOException; import java.util.Date; import java.io.DataInputStream; import java.io.FileOutputStream; import java.io.InputStreamReader; import java.io.PrintStream; import java.io.File; import java.io.BufferedReader; import java.util.StringTokenizer; import java.net.InetAddress; import java.text.SimpleDateFormat; import java.util.Iterator; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.mapred.FileInputFormat; import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.MapReduceBase; import 
org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.Reducer; /** * This program executes a specified operation that applies load to * the NameNode. * * When run simultaneously on multiple nodes, this program functions * as a stress-test and benchmark for namenode, especially when * the number of bytes written to each file is small. * * Valid operations are: * create_write * open_read * rename * delete * * NOTE: The open_read, rename and delete operations assume that the files * they operate on are already available. The create_write operation * must be run before running the other operations. */ public class NNBench { private static final Log LOG = LogFactory.getLog( "org.apache.hadoop.hdfs.NNBench"); protected static String CONTROL_DIR_NAME = "control"; protected static String OUTPUT_DIR_NAME = "output"; protected static String DATA_DIR_NAME = "data"; protected static final String DEFAULT_RES_FILE_NAME = "NNBench_results.log"; protected static final String NNBENCH_VERSION = "NameNode Benchmark 0.4"; public static String operation = "none"; public static long numberOfMaps = 1l; // default is 1 public static long numberOfReduces = 1l; // default is 1 public static long startTime = System.currentTimeMillis() + (120 * 1000); // default is 'now' + 2min public static long blockSize = 1l; // default is 1 public static int bytesToWrite = 0; // default is 0 public static long bytesPerChecksum = 1l; // default is 1 public static long numberOfFiles = 1l; // default is 1 public static short replicationFactorPerFile = 1; // default is 1 public static String baseDir = "/benchmarks/NNBench"; // default public static boolean readFileAfterOpen = false; // default is to not read // Supported operations private static final String OP_CREATE_WRITE = "create_write"; private static final String OP_OPEN_READ = "open_read"; private static final String OP_RENAME = "rename"; private 
static final String OP_DELETE = "delete"; // To display in the format that matches the NN and DN log format // Example: 2007-10-26 00:01:19,853 static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd' 'HH:mm:ss','S"); private static Configuration config = new Configuration(); /** * Clean up the files before a test run * * @throws IOException on error */ private static void cleanupBeforeTestrun() throws IOException { FileSystem tempFS = FileSystem.get(config); // Delete the data directory only if it is the create/write operation if (operation.equals(OP_CREATE_WRITE)) { LOG.info("Deleting data directory"); tempFS.delete(new Path(baseDir, DATA_DIR_NAME), true); } tempFS.delete(new Path(baseDir, CONTROL_DIR_NAME), true); tempFS.delete(new Path(baseDir, OUTPUT_DIR_NAME), true); } /** * Create control files before a test run. * Number of files created is equal to the number of maps specified * * @throws IOException on error */ private static void createControlFiles() throws IOException { FileSystem tempFS = FileSystem.get(config); LOG.info("Creating " + numberOfMaps + " control files"); for (int i = 0; i < numberOfMaps; i++) { String strFileName = "NNBench_Controlfile_" + i; Path filePath = new Path(new Path(baseDir, CONTROL_DIR_NAME), strFileName); SequenceFile.Writer writer = null; try { writer = SequenceFile.createWriter(tempFS, config, filePath, Text.class, LongWritable.class, CompressionType.NONE); writer.append(new Text(strFileName), new LongWritable(0l)); } finally { if (writer != null) { writer.close(); } } } } /** * Display version */ private static void displayVersion() { System.out.println(NNBENCH_VERSION); } /** * Display usage */ private static void displayUsage() { String usage = "Usage: nnbench <options>\n" + "Options:\n" + "\t-operation <Available operations are " + OP_CREATE_WRITE + " " + OP_OPEN_READ + " " + OP_RENAME + " " + OP_DELETE + ". 
" + "This option is mandatory>\n" + "\t * NOTE: The open_read, rename and delete operations assume " + "that the files they operate on, are already available. " + "The create_write operation must be run before running the " + "other operations.\n" + "\t-maps <number of maps. default is 1. This is not mandatory>\n" + "\t-reduces <number of reduces. default is 1. This is not mandatory>\n" + "\t-startTime <time to start, given in seconds from the epoch. " + "Make sure this is far enough into the future, so all maps " + "(operations) will start at the same time>. " + "default is launch time + 2 mins. This is not mandatory \n" + "\t-blockSize <Block size in bytes. default is 1. " + "This is not mandatory>\n" + "\t-bytesToWrite <Bytes to write. default is 0. " + "This is not mandatory>\n" + "\t-bytesPerChecksum <Bytes per checksum for the files. default is 1. " + "This is not mandatory>\n" + "\t-numberOfFiles <number of files to create. default is 1. " + "This is not mandatory>\n" + "\t-replicationFactorPerFile <Replication factor for the files." + " default is 1. This is not mandatory>\n" + "\t-baseDir <base DFS path. default is /becnhmarks/NNBench. " + "This is not mandatory>\n" + "\t-readFileAfterOpen <true or false. if true, it reads the file and " + "reports the average time to read. This is valid with the open_read " + "operation. default is false. 
This is not mandatory>\n" + "\t-help: Display the help statement\n"; System.out.println(usage); } /** * check for arguments and fail if the values are not specified * @param index positional number of an argument in the list of command * line's arguments * @param length total number of arguments */ public static void checkArgs(final int index, final int length) { if (index == length) { displayUsage(); System.exit(-1); } } /** * Parse input arguments * * @param args array of command line's parameters to be parsed */ public static void parseInputs(final String[] args) { // If there are no command line arguments, exit if (args.length == 0) { displayUsage(); System.exit(-1); } // Parse command line args for (int i = 0; i < args.length; i++) { if (args[i].equals("-operation")) { operation = args[++i]; } else if (args[i].equals("-maps")) { checkArgs(i + 1, args.length); numberOfMaps = Long.parseLong(args[++i]); } else if (args[i].equals("-reduces")) { checkArgs(i + 1, args.length); numberOfReduces = Long.parseLong(args[++i]); } else if (args[i].equals("-startTime")) { checkArgs(i + 1, args.length); startTime = Long.parseLong(args[++i]) * 1000; } else if (args[i].equals("-blockSize")) { checkArgs(i + 1, args.length); blockSize = Long.parseLong(args[++i]); } else if (args[i].equals("-bytesToWrite")) { checkArgs(i + 1, args.length); bytesToWrite = Integer.parseInt(args[++i]); } else if (args[i].equals("-bytesPerChecksum")) { checkArgs(i + 1, args.length); bytesPerChecksum = Long.parseLong(args[++i]); } else if (args[i].equals("-numberOfFiles")) { checkArgs(i + 1, args.length); numberOfFiles = Long.parseLong(args[++i]); } else if (args[i].equals("-replicationFactorPerFile")) { checkArgs(i + 1, args.length); replicationFactorPerFile = Short.parseShort(args[++i]); } else if (args[i].equals("-baseDir")) { checkArgs(i + 1, args.length); baseDir = args[++i]; } else if (args[i].equals("-readFileAfterOpen")) { checkArgs(i + 1, args.length); readFileAfterOpen = 
Boolean.parseBoolean(args[++i]); } else if (args[i].equals("-help")) { displayUsage(); System.exit(-1); } } LOG.info("Test Inputs: "); LOG.info(" Test Operation: " + operation); LOG.info(" Start time: " + sdf.format(new Date(startTime))); LOG.info(" Number of maps: " + numberOfMaps); LOG.info(" Number of reduces: " + numberOfReduces); LOG.info(" Block Size: " + blockSize); LOG.info(" Bytes to write: " + bytesToWrite); LOG.info(" Bytes per checksum: " + bytesPerChecksum); LOG.info(" Number of files: " + numberOfFiles); LOG.info(" Replication factor: " + replicationFactorPerFile); LOG.info(" Base dir: " + baseDir); LOG.info(" Read file after open: " + readFileAfterOpen); // Set user-defined parameters, so the map method can access the values config.set("test.nnbench.operation", operation); config.setLong("test.nnbench.maps", numberOfMaps); config.setLong("test.nnbench.reduces", numberOfReduces); config.setLong("test.nnbench.starttime", startTime); config.setLong("test.nnbench.blocksize", blockSize); config.setInt("test.nnbench.bytestowrite", bytesToWrite); config.setLong("test.nnbench.bytesperchecksum", bytesPerChecksum); config.setLong("test.nnbench.numberoffiles", numberOfFiles); config.setInt("test.nnbench.replicationfactor", (int) replicationFactorPerFile); config.set("test.nnbench.basedir", baseDir); config.setBoolean("test.nnbench.readFileAfterOpen", readFileAfterOpen); config.set("test.nnbench.datadir.name", DATA_DIR_NAME); config.set("test.nnbench.outputdir.name", OUTPUT_DIR_NAME); config.set("test.nnbench.controldir.name", CONTROL_DIR_NAME); } /** * Analyze the results * * @throws IOException on error */ private static void analyzeResults() throws IOException { final FileSystem fs = FileSystem.get(config); Path reduceFile = new Path(new Path(baseDir, OUTPUT_DIR_NAME), "part-00000"); DataInputStream in; in = new DataInputStream(fs.open(reduceFile)); BufferedReader lines; lines = new BufferedReader(new InputStreamReader(in)); long totalTimeAL1 = 0l; long 
totalTimeAL2 = 0l; long totalTimeTPmS = 0l; long lateMaps = 0l; long numOfExceptions = 0l; long successfulFileOps = 0l; long mapStartTimeTPmS = 0l; long mapEndTimeTPmS = 0l; String resultTPSLine1 = null; String resultTPSLine2 = null; String resultALLine1 = null; String resultALLine2 = null; String line; while((line = lines.readLine()) != null) { StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%;"); String attr = tokens.nextToken(); if (attr.endsWith(":totalTimeAL1")) { totalTimeAL1 = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":totalTimeAL2")) { totalTimeAL2 = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":totalTimeTPmS")) { totalTimeTPmS = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":latemaps")) { lateMaps = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":numOfExceptions")) { numOfExceptions = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":successfulFileOps")) { successfulFileOps = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":mapStartTimeTPmS")) { mapStartTimeTPmS = Long.parseLong(tokens.nextToken()); } else if (attr.endsWith(":mapEndTimeTPmS")) { mapEndTimeTPmS = Long.parseLong(tokens.nextToken()); } } // Average latency is the average time to perform 'n' number of // operations, n being the number of files double avgLatency1 = (double) totalTimeAL1 / successfulFileOps; double avgLatency2 = (double) totalTimeAL2 / successfulFileOps; // The time it takes for the longest running map is measured. Using that, // cluster transactions per second is calculated. It includes time to // retry any of the failed operations double longestMapTimeTPmS = (double) (mapEndTimeTPmS - mapStartTimeTPmS); double totalTimeTPS = (longestMapTimeTPmS == 0) ? (1000 * successfulFileOps) : (double) (1000 * successfulFileOps) / longestMapTimeTPmS; // The time it takes to perform 'n' operations is calculated (in ms), // n being the number of files. 
Using that time, the average execution // time is calculated. It includes time to retry any of the // failed operations double AverageExecutionTime = (totalTimeTPmS == 0) ? (double) successfulFileOps : (double) totalTimeTPmS / successfulFileOps; if (operation.equals(OP_CREATE_WRITE)) { // For create/write/close, it is treated as two transactions, // since a file create from a client perspective involves create and close resultTPSLine1 = " TPS: Create/Write/Close: " + (int) (totalTimeTPS * 2); resultTPSLine2 = "Avg exec time (ms): Create/Write/Close: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Create/Write: " + avgLatency1; resultALLine2 = " Avg Lat (ms): Close: " + avgLatency2; } else if (operation.equals(OP_OPEN_READ)) { resultTPSLine1 = " TPS: Open/Read: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Open/Read: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Open: " + avgLatency1; if (readFileAfterOpen) { resultALLine2 = " Avg Lat (ms): Read: " + avgLatency2; } } else if (operation.equals(OP_RENAME)) { resultTPSLine1 = " TPS: Rename: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Rename: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Rename: " + avgLatency1; } else if (operation.equals(OP_DELETE)) { resultTPSLine1 = " TPS: Delete: " + (int) totalTimeTPS; resultTPSLine2 = " Avg Exec time (ms): Delete: " + AverageExecutionTime; resultALLine1 = " Avg Lat (ms): Delete: " + avgLatency1; } String resultLines[] = { "-------------- NNBench -------------- : ", " Version: " + NNBENCH_VERSION, " Date & time: " + sdf.format(new Date( System.currentTimeMillis())), "", " Test Operation: " + operation, " Start time: " + sdf.format(new Date(startTime)), " Maps to run: " + numberOfMaps, " Reduces to run: " + numberOfReduces, " Block Size (bytes): " + blockSize, " Bytes to write: " + bytesToWrite, " Bytes per checksum: " + bytesPerChecksum, " Number of files: " + numberOfFiles, " Replication factor: " + 
replicationFactorPerFile, " Successful file operations: " + successfulFileOps, "", " # maps that missed the barrier: " + lateMaps, " # exceptions: " + numOfExceptions, "", resultTPSLine1, resultTPSLine2, resultALLine1, resultALLine2, "", " RAW DATA: AL Total #1: " + totalTimeAL1, " RAW DATA: AL Total #2: " + totalTimeAL2, " RAW DATA: TPS Total (ms): " + totalTimeTPmS, " RAW DATA: Longest Map Time (ms): " + longestMapTimeTPmS, " RAW DATA: Late maps: " + lateMaps, " RAW DATA: # of exceptions: " + numOfExceptions, "" }; PrintStream res = new PrintStream(new FileOutputStream( new File(DEFAULT_RES_FILE_NAME), true)); // Write to a file and also dump to log for(int i = 0; i < resultLines.length; i++) { LOG.info(resultLines[i]); res.println(resultLines[i]); } } /** * Run the test * * @throws IOException on error */ public static void runTests() throws IOException { config.setLong("io.bytes.per.checksum", bytesPerChecksum); JobConf job = new JobConf(config, NNBench.class); job.setJobName("NNBench-" + operation); FileInputFormat.setInputPaths(job, new Path(baseDir, CONTROL_DIR_NAME)); job.setInputFormat(SequenceFileInputFormat.class); // Explicitly set number of max map attempts to 1. 
job.setMaxMapAttempts(1); // Explicitly turn off speculative execution job.setSpeculativeExecution(false); job.setMapperClass(NNBenchMapper.class); job.setReducerClass(NNBenchReducer.class); FileOutputFormat.setOutputPath(job, new Path(baseDir, OUTPUT_DIR_NAME)); job.setOutputKeyClass(Text.class); job.setOutputValueClass(Text.class); job.setNumReduceTasks((int) numberOfReduces); JobClient.runJob(job); } /** * Validate the inputs */ public static void validateInputs() { // If it is not one of the four operations, then fail if (!operation.equals(OP_CREATE_WRITE) && !operation.equals(OP_OPEN_READ) && !operation.equals(OP_RENAME) && !operation.equals(OP_DELETE)) { System.err.println("Error: Unknown operation: " + operation); displayUsage(); System.exit(-1); } // If number of maps is a negative number, then fail // Hadoop allows the number of maps to be 0 if (numberOfMaps < 0) { System.err.println("Error: Number of maps must be a positive number"); displayUsage(); System.exit(-1); } // If number of reduces is a negative number or 0, then fail if (numberOfReduces <= 0) { System.err.println("Error: Number of reduces must be a positive number"); displayUsage(); System.exit(-1); } // If blocksize is a negative number or 0, then fail if (blockSize <= 0) { System.err.println("Error: Block size must be a positive number"); displayUsage(); System.exit(-1); } // If bytes to write is a negative number, then fail if (bytesToWrite < 0) { System.err.println("Error: Bytes to write must be a positive number"); displayUsage(); System.exit(-1); } // If bytes per checksum is a negative number, then fail if (bytesPerChecksum < 0) { System.err.println("Error: Bytes per checksum must be a positive number"); displayUsage(); System.exit(-1); } // If number of files is a negative number, then fail if (numberOfFiles < 0) { System.err.println("Error: Number of files must be a positive number"); displayUsage(); System.exit(-1); } // If replication factor is a negative number, then fail if 
(replicationFactorPerFile < 0) { System.err.println("Error: Replication factor must be a positive number"); displayUsage(); System.exit(-1); } // If block size is not a multiple of bytesperchecksum, fail if (blockSize % bytesPerChecksum != 0) { System.err.println("Error: Block Size in bytes must be a multiple of " + "bytes per checksum: "); displayUsage(); System.exit(-1); } } /** * Main method for running the NNBench benchmarks * * @param args array of command line arguments * @throws IOException indicates a problem with test startup */ public static void main(String[] args) throws IOException { // Display the application version string displayVersion(); // Parse the inputs parseInputs(args); // Validate inputs validateInputs(); // Clean up files before the test run cleanupBeforeTestrun(); // Create control files before test run createControlFiles(); // Run the tests as a map reduce job runTests(); // Analyze results analyzeResults(); } /** * Mapper class */ static class NNBenchMapper extends Configured implements Mapper<Text, LongWritable, Text, Text> { FileSystem filesystem = null; private String hostName = null; long numberOfFiles = 1l; long blkSize = 1l; short replFactor = 1; int bytesToWrite = 0; String baseDir = null; String dataDirName = null; String op = null; boolean readFile = false; final int MAX_OPERATION_EXCEPTIONS = 1000; // Data to collect from the operation int numOfExceptions = 0; long startTimeAL = 0l; long totalTimeAL1 = 0l; long totalTimeAL2 = 0l; long successfulFileOps = 0l; /** * Constructor */ public NNBenchMapper() { } /** * Mapper base implementation */ public void configure(JobConf conf) { setConf(conf); try { filesystem = FileSystem.get(conf); } catch(Exception e) { throw new RuntimeException("Cannot get file system.", e); } try { hostName = InetAddress.getLocalHost().getHostName(); } catch(Exception e) { throw new RuntimeException("Error getting hostname", e); } } /** * Mapper base implementation */ public void close() throws 
IOException { } /** * Returns when the current number of seconds from the epoch equals * the command line argument given by <code>-startTime</code>. * This allows multiple instances of this program, running on clock * synchronized nodes, to start at roughly the same time. * @return true if the method was able to sleep for <code>-startTime</code> * without interruption; false otherwise */ private boolean barrier() { long startTime = getConf().getLong("test.nnbench.starttime", 0l); long currentTime = System.currentTimeMillis(); long sleepTime = startTime - currentTime; boolean retVal = false; // If the sleep time is greater than 0, then sleep and return if (sleepTime > 0) { LOG.info("Waiting in barrier for: " + sleepTime + " ms"); try { Thread.sleep(sleepTime); retVal = true; } catch (Exception e) { retVal = false; } } return retVal; } /** * Map method */ public void map(Text key, LongWritable value, OutputCollector<Text, Text> output, Reporter reporter) throws IOException { Configuration conf = filesystem.getConf(); numberOfFiles = conf.getLong("test.nnbench.numberoffiles", 1l); blkSize = conf.getLong("test.nnbench.blocksize", 1l); replFactor = (short) (conf.getInt("test.nnbench.replicationfactor", 1)); bytesToWrite = conf.getInt("test.nnbench.bytestowrite", 0); baseDir = conf.get("test.nnbench.basedir"); dataDirName = conf.get("test.nnbench.datadir.name"); op = conf.get("test.nnbench.operation"); readFile = conf.getBoolean("test.nnbench.readFileAfterOpen", false); long totalTimeTPmS = 0l; long startTimeTPmS = 0l; long endTimeTPms = 0l; numOfExceptions = 0; startTimeAL = 0l; totalTimeAL1 = 0l; totalTimeAL2 = 0l; successfulFileOps = 0l; if (barrier()) { if (op.equals(OP_CREATE_WRITE)) { startTimeTPmS = System.currentTimeMillis(); doCreateWriteOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_OPEN_READ)) { startTimeTPmS = System.currentTimeMillis(); doOpenReadOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_RENAME)) { startTimeTPmS = 
System.currentTimeMillis(); doRenameOp("file_" + hostName + "_", reporter); } else if (op.equals(OP_DELETE)) { startTimeTPmS = System.currentTimeMillis(); doDeleteOp("file_" + hostName + "_", reporter); } endTimeTPms = System.currentTimeMillis(); totalTimeTPmS = endTimeTPms - startTimeTPmS; } else { output.collect(new Text("l:latemaps"), new Text("1")); } // collect after the map end time is measured output.collect(new Text("l:totalTimeAL1"), new Text(String.valueOf(totalTimeAL1))); output.collect(new Text("l:totalTimeAL2"), new Text(String.valueOf(totalTimeAL2))); output.collect(new Text("l:numOfExceptions"), new Text(String.valueOf(numOfExceptions))); output.collect(new Text("l:successfulFileOps"), new Text(String.valueOf(successfulFileOps))); output.collect(new Text("l:totalTimeTPmS"), new Text(String.valueOf(totalTimeTPmS))); output.collect(new Text("min:mapStartTimeTPmS"), new Text(String.valueOf(startTimeTPmS))); output.collect(new Text("max:mapEndTimeTPmS"), new Text(String.valueOf(endTimeTPms))); } /** * Create and Write operation. * @param name of the prefix of the putput file to be created * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doCreateWriteOp(String name, Reporter reporter) { FSDataOutputStream out; byte[] buffer = new byte[bytesToWrite]; for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL (transaction #1) startTimeAL = System.currentTimeMillis(); // Create the file // Use a buffer size of 512 out = filesystem.create(filePath, true, 512, replFactor, blkSize); out.write(buffer); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); // Close the file / file output stream // Set up timers for measuring AL (transaction #2) startTimeAL = System.currentTimeMillis(); out.close(); totalTimeAL2 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: " + "Create/Write/Close"); numOfExceptions++; } } } } /** * Open operation * @param name of the prefix of the putput file to be read * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doOpenReadOp(String name, Reporter reporter) { FSDataInputStream input; byte[] buffer = new byte[bytesToWrite]; for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); input = filesystem.open(filePath); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); // If the file needs to be read (specified at command line) if (readFile) { startTimeAL = System.currentTimeMillis(); input.readFully(buffer); totalTimeAL2 += (System.currentTimeMillis() - startTimeAL); } input.close(); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: OpenRead " + e); numOfExceptions++; } } } } /** * Rename operation * @param name of prefix of the file to be renamed * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doRenameOp(String name, Reporter reporter) { for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); Path filePathR = new Path(new Path(baseDir, dataDirName), name + "_r_" + l); boolean successfulOp = false; while (! successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); filesystem.rename(filePath, filePathR); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception recorded in op: Rename"); numOfExceptions++; } } } } /** * Delete operation * @param name of prefix of the file to be deleted * @param reporter an instanse of (@link Reporter) to be used for * status' updates */ private void doDeleteOp(String name, Reporter reporter) { for (long l = 0l; l < numberOfFiles; l++) { Path filePath = new Path(new Path(baseDir, dataDirName), name + "_" + l); boolean successfulOp = false; while (! 
successfulOp && numOfExceptions < MAX_OPERATION_EXCEPTIONS) { try { // Set up timer for measuring AL startTimeAL = System.currentTimeMillis(); filesystem.delete(filePath, true); totalTimeAL1 += (System.currentTimeMillis() - startTimeAL); successfulOp = true; successfulFileOps ++; reporter.setStatus("Finish "+ l + " files"); } catch (IOException e) { LOG.info("Exception in recorded op: Delete"); numOfExceptions++; } } } } } /** * Reducer class */ static class NNBenchReducer extends MapReduceBase implements Reducer<Text, Text, Text, Text> { protected String hostName; public NNBenchReducer () { LOG.info("Starting NNBenchReducer !!!"); try { hostName = java.net.InetAddress.getLocalHost().getHostName(); } catch(Exception e) { hostName = "localhost"; } LOG.info("Starting NNBenchReducer on " + hostName); } /** * Reduce method */ public void reduce(Text key, Iterator<Text> values, OutputCollector<Text, Text> output, Reporter reporter ) throws IOException { String field = key.toString(); reporter.setStatus("starting " + field + " ::host = " + hostName); // sum long values if (field.startsWith("l:")) { long lSum = 0; while (values.hasNext()) { lSum += Long.parseLong(values.next().toString()); } output.collect(key, new Text(String.valueOf(lSum))); } if (field.startsWith("min:")) { long minVal = -1; while (values.hasNext()) { long value = Long.parseLong(values.next().toString()); if (minVal == -1) { minVal = value; } else { if (value != 0 && value < minVal) { minVal = value; } } } output.collect(key, new Text(String.valueOf(minVal))); } if (field.startsWith("max:")) { long maxVal = -1; while (values.hasNext()) { long value = Long.parseLong(values.next().toString()); if (maxVal == -1) { maxVal = value; } else { if (value > maxVal) { maxVal = value; } } } output.collect(key, new Text(String.valueOf(maxVal))); } reporter.setStatus("finished " + field + " ::host = " + hostName); } } }
googleapis/google-cloud-java
35,549
java-java-shopping-merchant-issue-resolution/grpc-google-shopping-merchant-issue-resolution-v1beta/src/main/java/com/google/shopping/merchant/issueresolution/v1beta/IssueResolutionServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.issueresolution.v1beta; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service to provide an issue resolution content for account issues and product * issues. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/shopping/merchant/issueresolution/v1beta/issueresolution.proto") @io.grpc.stub.annotations.GrpcGenerated public final class IssueResolutionServiceGrpc { private IssueResolutionServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.shopping.merchant.issueresolution.v1beta.IssueResolutionService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> getRenderAccountIssuesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RenderAccountIssues", requestType = com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest.class, responseType = com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> getRenderAccountIssuesMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> getRenderAccountIssuesMethod; if ((getRenderAccountIssuesMethod = IssueResolutionServiceGrpc.getRenderAccountIssuesMethod) == null) { synchronized (IssueResolutionServiceGrpc.class) { if ((getRenderAccountIssuesMethod = IssueResolutionServiceGrpc.getRenderAccountIssuesMethod) == null) { IssueResolutionServiceGrpc.getRenderAccountIssuesMethod = getRenderAccountIssuesMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.issueresolution.v1beta .RenderAccountIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta .RenderAccountIssuesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "RenderAccountIssues")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.issueresolution.v1beta .RenderAccountIssuesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( 
com.google.shopping.merchant.issueresolution.v1beta .RenderAccountIssuesResponse.getDefaultInstance())) .setSchemaDescriptor( new IssueResolutionServiceMethodDescriptorSupplier("RenderAccountIssues")) .build(); } } } return getRenderAccountIssuesMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> getRenderProductIssuesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "RenderProductIssues", requestType = com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest.class, responseType = com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> getRenderProductIssuesMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> getRenderProductIssuesMethod; if ((getRenderProductIssuesMethod = IssueResolutionServiceGrpc.getRenderProductIssuesMethod) == null) { synchronized (IssueResolutionServiceGrpc.class) { if ((getRenderProductIssuesMethod = IssueResolutionServiceGrpc.getRenderProductIssuesMethod) == null) { IssueResolutionServiceGrpc.getRenderProductIssuesMethod = getRenderProductIssuesMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "RenderProductIssues")) 
.setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesResponse.getDefaultInstance())) .setSchemaDescriptor( new IssueResolutionServiceMethodDescriptorSupplier("RenderProductIssues")) .build(); } } } return getRenderProductIssuesMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest, com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> getTriggerActionMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "TriggerAction", requestType = com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest.class, responseType = com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest, com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> getTriggerActionMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest, com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> getTriggerActionMethod; if ((getTriggerActionMethod = IssueResolutionServiceGrpc.getTriggerActionMethod) == null) { synchronized (IssueResolutionServiceGrpc.class) { if ((getTriggerActionMethod = IssueResolutionServiceGrpc.getTriggerActionMethod) == null) { IssueResolutionServiceGrpc.getTriggerActionMethod = getTriggerActionMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest, com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> newBuilder() 
.setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "TriggerAction")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.issueresolution.v1beta .TriggerActionRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.issueresolution.v1beta .TriggerActionResponse.getDefaultInstance())) .setSchemaDescriptor( new IssueResolutionServiceMethodDescriptorSupplier("TriggerAction")) .build(); } } } return getTriggerActionMethod; } /** Creates a new async stub that supports all call types for the service */ public static IssueResolutionServiceStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceStub>() { @java.lang.Override public IssueResolutionServiceStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceStub(channel, callOptions); } }; return IssueResolutionServiceStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static IssueResolutionServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceBlockingV2Stub>() { @java.lang.Override public IssueResolutionServiceBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceBlockingV2Stub(channel, callOptions); } }; return IssueResolutionServiceBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static IssueResolutionServiceBlockingStub newBlockingStub(io.grpc.Channel 
channel) { io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceBlockingStub>() { @java.lang.Override public IssueResolutionServiceBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceBlockingStub(channel, callOptions); } }; return IssueResolutionServiceBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static IssueResolutionServiceFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IssueResolutionServiceFutureStub>() { @java.lang.Override public IssueResolutionServiceFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceFutureStub(channel, callOptions); } }; return IssueResolutionServiceFutureStub.newStub(factory, channel); } /** * * * <pre> * Service to provide an issue resolution content for account issues and product * issues. * </pre> */ public interface AsyncService { /** * * * <pre> * Provide a list of business's account issues with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ default void renderAccountIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getRenderAccountIssuesMethod(), responseObserver); } /** * * * <pre> * Provide a list of issues for business's product with an issue resolution * content and available actions. 
This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ default void renderProductIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getRenderProductIssuesMethod(), responseObserver); } /** * * * <pre> * Start an action. The action can be requested by a business in * third-party application. Before the business can request the action, the * third-party application needs to show them action specific content and * display a user input form. * The action can be successfully started only once all `required` inputs are * provided. If any `required` input is missing, or invalid value was * provided, the service will return 400 error. Validation errors will contain * [Ids][google.shopping.merchant.issueresolution.v1beta.InputField.id] for * all problematic field together with translated, human readable error * messages that can be shown to the user. * </pre> */ default void triggerAction( com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getTriggerActionMethod(), responseObserver); } } /** * Base class for the server implementation of the service IssueResolutionService. * * <pre> * Service to provide an issue resolution content for account issues and product * issues. 
* </pre> */ public abstract static class IssueResolutionServiceImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return IssueResolutionServiceGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service IssueResolutionService. * * <pre> * Service to provide an issue resolution content for account issues and product * issues. * </pre> */ public static final class IssueResolutionServiceStub extends io.grpc.stub.AbstractAsyncStub<IssueResolutionServiceStub> { private IssueResolutionServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IssueResolutionServiceStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceStub(channel, callOptions); } /** * * * <pre> * Provide a list of business's account issues with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public void renderAccountIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRenderAccountIssuesMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Provide a list of issues for business's product with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. 
* </pre> */ public void renderProductIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getRenderProductIssuesMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Start an action. The action can be requested by a business in * third-party application. Before the business can request the action, the * third-party application needs to show them action specific content and * display a user input form. * The action can be successfully started only once all `required` inputs are * provided. If any `required` input is missing, or invalid value was * provided, the service will return 400 error. Validation errors will contain * [Ids][google.shopping.merchant.issueresolution.v1beta.InputField.id] for * all problematic field together with translated, human readable error * messages that can be shown to the user. * </pre> */ public void triggerAction( com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest request, io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getTriggerActionMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service IssueResolutionService. * * <pre> * Service to provide an issue resolution content for account issues and product * issues. 
* </pre> */ public static final class IssueResolutionServiceBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<IssueResolutionServiceBlockingV2Stub> { private IssueResolutionServiceBlockingV2Stub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IssueResolutionServiceBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Provide a list of business's account issues with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse renderAccountIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRenderAccountIssuesMethod(), getCallOptions(), request); } /** * * * <pre> * Provide a list of issues for business's product with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse renderProductIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRenderProductIssuesMethod(), getCallOptions(), request); } /** * * * <pre> * Start an action. The action can be requested by a business in * third-party application. Before the business can request the action, the * third-party application needs to show them action specific content and * display a user input form. * The action can be successfully started only once all `required` inputs are * provided. 
If any `required` input is missing, or invalid value was * provided, the service will return 400 error. Validation errors will contain * [Ids][google.shopping.merchant.issueresolution.v1beta.InputField.id] for * all problematic field together with translated, human readable error * messages that can be shown to the user. * </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse triggerAction( com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getTriggerActionMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service IssueResolutionService. * * <pre> * Service to provide an issue resolution content for account issues and product * issues. * </pre> */ public static final class IssueResolutionServiceBlockingStub extends io.grpc.stub.AbstractBlockingStub<IssueResolutionServiceBlockingStub> { private IssueResolutionServiceBlockingStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IssueResolutionServiceBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceBlockingStub(channel, callOptions); } /** * * * <pre> * Provide a list of business's account issues with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. 
* </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse renderAccountIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRenderAccountIssuesMethod(), getCallOptions(), request); } /** * * * <pre> * Provide a list of issues for business's product with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse renderProductIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getRenderProductIssuesMethod(), getCallOptions(), request); } /** * * * <pre> * Start an action. The action can be requested by a business in * third-party application. Before the business can request the action, the * third-party application needs to show them action specific content and * display a user input form. * The action can be successfully started only once all `required` inputs are * provided. If any `required` input is missing, or invalid value was * provided, the service will return 400 error. Validation errors will contain * [Ids][google.shopping.merchant.issueresolution.v1beta.InputField.id] for * all problematic field together with translated, human readable error * messages that can be shown to the user. * </pre> */ public com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse triggerAction( com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getTriggerActionMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service * IssueResolutionService. 
* * <pre> * Service to provide an issue resolution content for account issues and product * issues. * </pre> */ public static final class IssueResolutionServiceFutureStub extends io.grpc.stub.AbstractFutureStub<IssueResolutionServiceFutureStub> { private IssueResolutionServiceFutureStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IssueResolutionServiceFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IssueResolutionServiceFutureStub(channel, callOptions); } /** * * * <pre> * Provide a list of business's account issues with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesResponse> renderAccountIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getRenderAccountIssuesMethod(), getCallOptions()), request); } /** * * * <pre> * Provide a list of issues for business's product with an issue resolution * content and available actions. This content and actions are meant to be * rendered and shown in third-party applications. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesResponse> renderProductIssues( com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getRenderProductIssuesMethod(), getCallOptions()), request); } /** * * * <pre> * Start an action. The action can be requested by a business in * third-party application. 
Before the business can request the action, the * third-party application needs to show them action specific content and * display a user input form. * The action can be successfully started only once all `required` inputs are * provided. If any `required` input is missing, or invalid value was * provided, the service will return 400 error. Validation errors will contain * [Ids][google.shopping.merchant.issueresolution.v1beta.InputField.id] for * all problematic field together with translated, human readable error * messages that can be shown to the user. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse> triggerAction( com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getTriggerActionMethod(), getCallOptions()), request); } } private static final int METHODID_RENDER_ACCOUNT_ISSUES = 0; private static final int METHODID_RENDER_PRODUCT_ISSUES = 1; private static final int METHODID_TRIGGER_ACTION = 2; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_RENDER_ACCOUNT_ISSUES: serviceImpl.renderAccountIssues( (com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest) request, (io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta 
.RenderAccountIssuesResponse>) responseObserver); break; case METHODID_RENDER_PRODUCT_ISSUES: serviceImpl.renderProductIssues( (com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest) request, (io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesResponse>) responseObserver); break; case METHODID_TRIGGER_ACTION: serviceImpl.triggerAction( (com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest) request, (io.grpc.stub.StreamObserver< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getRenderAccountIssuesMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.issueresolution.v1beta.RenderAccountIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta .RenderAccountIssuesResponse>(service, METHODID_RENDER_ACCOUNT_ISSUES))) .addMethod( getRenderProductIssuesMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.issueresolution.v1beta.RenderProductIssuesRequest, com.google.shopping.merchant.issueresolution.v1beta .RenderProductIssuesResponse>(service, METHODID_RENDER_PRODUCT_ISSUES))) .addMethod( getTriggerActionMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.shopping.merchant.issueresolution.v1beta.TriggerActionRequest, com.google.shopping.merchant.issueresolution.v1beta.TriggerActionResponse>( service, METHODID_TRIGGER_ACTION))) .build(); } private abstract static 
class IssueResolutionServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { IssueResolutionServiceBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return com.google.shopping.merchant.issueresolution.v1beta.IssueResolutionProto .getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("IssueResolutionService"); } } private static final class IssueResolutionServiceFileDescriptorSupplier extends IssueResolutionServiceBaseDescriptorSupplier { IssueResolutionServiceFileDescriptorSupplier() {} } private static final class IssueResolutionServiceMethodDescriptorSupplier extends IssueResolutionServiceBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; IssueResolutionServiceMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (IssueResolutionServiceGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new IssueResolutionServiceFileDescriptorSupplier()) .addMethod(getRenderAccountIssuesMethod()) .addMethod(getRenderProductIssuesMethod()) .addMethod(getTriggerActionMethod()) .build(); } } } return result; } }
apache/polaris
35,585
runtime/service/src/main/java/org/apache/polaris/service/catalog/iceberg/CatalogHandlerUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.polaris.service.catalog.iceberg; import static org.apache.iceberg.TableProperties.COMMIT_MAX_RETRY_WAIT_MS_DEFAULT; import static org.apache.iceberg.TableProperties.COMMIT_MIN_RETRY_WAIT_MS_DEFAULT; import static org.apache.iceberg.TableProperties.COMMIT_TOTAL_RETRY_TIME_MS_DEFAULT; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import jakarta.annotation.Nullable; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; import java.lang.reflect.Field; import java.time.OffsetDateTime; import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import org.apache.iceberg.BaseMetadataTable; import org.apache.iceberg.BaseTable; import org.apache.iceberg.BaseTransaction; import org.apache.iceberg.DataOperations; import org.apache.iceberg.MetadataUpdate; import 
org.apache.iceberg.MetadataUpdate.UpgradeFormatVersion; import org.apache.iceberg.PartitionSpec; import org.apache.iceberg.Schema; import org.apache.iceberg.Snapshot; import org.apache.iceberg.SnapshotRef; import org.apache.iceberg.SortOrder; import org.apache.iceberg.Table; import org.apache.iceberg.TableMetadata; import org.apache.iceberg.TableOperations; import org.apache.iceberg.Transaction; import org.apache.iceberg.UpdateRequirement; import org.apache.iceberg.catalog.Catalog; import org.apache.iceberg.catalog.Namespace; import org.apache.iceberg.catalog.SupportsNamespaces; import org.apache.iceberg.catalog.TableIdentifier; import org.apache.iceberg.catalog.ViewCatalog; import org.apache.iceberg.exceptions.AlreadyExistsException; import org.apache.iceberg.exceptions.CommitFailedException; import org.apache.iceberg.exceptions.NoSuchNamespaceException; import org.apache.iceberg.exceptions.NoSuchTableException; import org.apache.iceberg.exceptions.NoSuchViewException; import org.apache.iceberg.rest.requests.CreateNamespaceRequest; import org.apache.iceberg.rest.requests.CreateTableRequest; import org.apache.iceberg.rest.requests.CreateViewRequest; import org.apache.iceberg.rest.requests.RegisterTableRequest; import org.apache.iceberg.rest.requests.RenameTableRequest; import org.apache.iceberg.rest.requests.UpdateNamespacePropertiesRequest; import org.apache.iceberg.rest.requests.UpdateTableRequest; import org.apache.iceberg.rest.responses.CreateNamespaceResponse; import org.apache.iceberg.rest.responses.GetNamespaceResponse; import org.apache.iceberg.rest.responses.ImmutableLoadViewResponse; import org.apache.iceberg.rest.responses.ListNamespacesResponse; import org.apache.iceberg.rest.responses.ListTablesResponse; import org.apache.iceberg.rest.responses.LoadTableResponse; import org.apache.iceberg.rest.responses.LoadViewResponse; import org.apache.iceberg.rest.responses.UpdateNamespacePropertiesResponse; import org.apache.iceberg.util.Pair; import 
org.apache.iceberg.util.PropertyUtil; import org.apache.iceberg.util.SnapshotUtil; import org.apache.iceberg.util.Tasks; import org.apache.iceberg.view.BaseView; import org.apache.iceberg.view.SQLViewRepresentation; import org.apache.iceberg.view.View; import org.apache.iceberg.view.ViewBuilder; import org.apache.iceberg.view.ViewMetadata; import org.apache.iceberg.view.ViewOperations; import org.apache.iceberg.view.ViewRepresentation; import org.apache.polaris.core.config.FeatureConfiguration; import org.apache.polaris.core.config.RealmConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * CODE_COPIED_TO_POLARIS Copied from CatalogHandler in Iceberg 1.8.0 Contains a collection of * utilities related to managing Iceberg entities */ @ApplicationScoped public class CatalogHandlerUtils { private static final Logger LOGGER = LoggerFactory.getLogger(CatalogHandlerUtils.class); private static final Schema EMPTY_SCHEMA = new Schema(); private static final String INITIAL_PAGE_TOKEN = ""; private static final String CONFLICT_RESOLUTION_ACTION = "polaris.internal.conflict-resolution.by-operation-type.replace"; private static final Field LAST_SEQUENCE_NUMBER_FIELD; static { try { LAST_SEQUENCE_NUMBER_FIELD = TableMetadata.Builder.class.getDeclaredField("lastSequenceNumber"); LAST_SEQUENCE_NUMBER_FIELD.setAccessible(true); } catch (NoSuchFieldException e) { throw new RuntimeException("Unable to access field", e); } } private final int maxCommitRetries; private final boolean rollbackCompactionEnabled; @Inject public CatalogHandlerUtils(RealmConfig realmConfig) { this( realmConfig.getConfig(FeatureConfiguration.ICEBERG_COMMIT_MAX_RETRIES), realmConfig.getConfig(FeatureConfiguration.ICEBERG_ROLLBACK_COMPACTION_ON_CONFLICTS)); } @VisibleForTesting public CatalogHandlerUtils(int maxCommitRetries, boolean rollbackCompactionEnabled) { this.maxCommitRetries = maxCommitRetries; this.rollbackCompactionEnabled = rollbackCompactionEnabled; } /** * Exception used to avoid 
retrying commits when assertions fail. * * <p>When a REST assertion fails, it will throw CommitFailedException to send back to the client. * But the assertion checks happen in the block that is retried if {@link * TableOperations#commit(TableMetadata, TableMetadata)} throws CommitFailedException. This is * used to avoid retries for assertion failures, which are unwrapped and rethrown outside of the * commit loop. */ private static class ValidationFailureException extends RuntimeException { private final CommitFailedException wrapped; private ValidationFailureException(CommitFailedException cause) { super(cause); this.wrapped = cause; } public CommitFailedException wrapped() { return wrapped; } } private <T> Pair<List<T>, String> paginate( List<T> list, @Nullable String pageToken, @Nullable Integer pageSize) { if (pageToken == null) { return Pair.of(list, null); } int pageStart = INITIAL_PAGE_TOKEN.equals(pageToken) ? 0 : Integer.parseInt(pageToken); if (pageStart >= list.size()) { return Pair.of(Collections.emptyList(), null); } // if pageSize is null, return the rest of the list pageSize = pageSize == null ? list.size() : pageSize; int end = Math.min(pageStart + pageSize, list.size()); List<T> subList = list.subList(pageStart, end); String nextPageToken = end >= list.size() ? 
null : String.valueOf(end); return Pair.of(subList, nextPageToken); } public ListNamespacesResponse listNamespaces(SupportsNamespaces catalog, Namespace parent) { List<Namespace> results; if (parent.isEmpty()) { results = catalog.listNamespaces(); } else { results = catalog.listNamespaces(parent); } return ListNamespacesResponse.builder().addAll(results).build(); } public ListNamespacesResponse listNamespaces( SupportsNamespaces catalog, Namespace parent, String pageToken, Integer pageSize) { List<Namespace> results; if (parent.isEmpty()) { results = catalog.listNamespaces(); } else { results = catalog.listNamespaces(parent); } Pair<List<Namespace>, String> page = paginate(results, pageToken, pageSize); return ListNamespacesResponse.builder() .addAll(page.first()) .nextPageToken(page.second()) .build(); } public CreateNamespaceResponse createNamespace( SupportsNamespaces catalog, CreateNamespaceRequest request) { Namespace namespace = request.namespace(); catalog.createNamespace(namespace, request.properties()); return CreateNamespaceResponse.builder() .withNamespace(namespace) .setProperties(catalog.loadNamespaceMetadata(namespace)) .build(); } public void namespaceExists(SupportsNamespaces catalog, Namespace namespace) { if (!catalog.namespaceExists(namespace)) { throw new NoSuchNamespaceException("Namespace does not exist: %s", namespace); } } public GetNamespaceResponse loadNamespace(SupportsNamespaces catalog, Namespace namespace) { Map<String, String> properties = catalog.loadNamespaceMetadata(namespace); return GetNamespaceResponse.builder() .withNamespace(namespace) .setProperties(properties) .build(); } public void dropNamespace(SupportsNamespaces catalog, Namespace namespace) { boolean dropped = catalog.dropNamespace(namespace); if (!dropped) { throw new NoSuchNamespaceException("Namespace does not exist: %s", namespace); } } public UpdateNamespacePropertiesResponse updateNamespaceProperties( SupportsNamespaces catalog, Namespace namespace, 
UpdateNamespacePropertiesRequest request) { request.validate(); Set<String> removals = Sets.newHashSet(request.removals()); Map<String, String> updates = request.updates(); Map<String, String> startProperties = catalog.loadNamespaceMetadata(namespace); Set<String> missing = Sets.difference(removals, startProperties.keySet()); if (!updates.isEmpty()) { catalog.setProperties(namespace, updates); } if (!removals.isEmpty()) { // remove the original set just in case there was an update just after loading properties catalog.removeProperties(namespace, removals); } return UpdateNamespacePropertiesResponse.builder() .addMissing(missing) .addUpdated(updates.keySet()) .addRemoved(Sets.difference(removals, missing)) .build(); } public ListTablesResponse listTables(Catalog catalog, Namespace namespace) { List<TableIdentifier> idents = catalog.listTables(namespace); return ListTablesResponse.builder().addAll(idents).build(); } public ListTablesResponse listTables( Catalog catalog, Namespace namespace, String pageToken, Integer pageSize) { List<TableIdentifier> results = catalog.listTables(namespace); Pair<List<TableIdentifier>, String> page = paginate(results, pageToken, pageSize); return ListTablesResponse.builder().addAll(page.first()).nextPageToken(page.second()).build(); } public LoadTableResponse stageTableCreate( Catalog catalog, Namespace namespace, CreateTableRequest request) { request.validate(); TableIdentifier ident = TableIdentifier.of(namespace, request.name()); if (catalog.tableExists(ident)) { throw new AlreadyExistsException("Table already exists: %s", ident); } Map<String, String> properties = Maps.newHashMap(); properties.put("created-at", OffsetDateTime.now(ZoneOffset.UTC).toString()); properties.putAll(request.properties()); String location; if (request.location() != null) { location = request.location(); } else { location = catalog .buildTable(ident, request.schema()) .withPartitionSpec(request.spec()) .withSortOrder(request.writeOrder()) 
.withProperties(properties) .createTransaction() .table() .location(); } TableMetadata metadata = TableMetadata.newTableMetadata( request.schema(), request.spec() != null ? request.spec() : PartitionSpec.unpartitioned(), request.writeOrder() != null ? request.writeOrder() : SortOrder.unsorted(), location, properties); return LoadTableResponse.builder().withTableMetadata(metadata).build(); } public LoadTableResponse createTable( Catalog catalog, Namespace namespace, CreateTableRequest request) { request.validate(); TableIdentifier ident = TableIdentifier.of(namespace, request.name()); Table table = catalog .buildTable(ident, request.schema()) .withLocation(request.location()) .withPartitionSpec(request.spec()) .withSortOrder(request.writeOrder()) .withProperties(request.properties()) .create(); if (table instanceof BaseTable baseTable) { return LoadTableResponse.builder() .withTableMetadata(baseTable.operations().current()) .build(); } throw new IllegalStateException("Cannot wrap catalog that does not produce BaseTable"); } public LoadTableResponse registerTable( Catalog catalog, Namespace namespace, RegisterTableRequest request) { request.validate(); TableIdentifier identifier = TableIdentifier.of(namespace, request.name()); Table table = catalog.registerTable(identifier, request.metadataLocation()); if (table instanceof BaseTable baseTable) { return LoadTableResponse.builder() .withTableMetadata(baseTable.operations().current()) .build(); } throw new IllegalStateException("Cannot wrap catalog that does not produce BaseTable"); } public void dropTable(Catalog catalog, TableIdentifier ident) { boolean dropped = catalog.dropTable(ident, false); if (!dropped) { throw new NoSuchTableException("Table does not exist: %s", ident); } } public void purgeTable(Catalog catalog, TableIdentifier ident) { boolean dropped = catalog.dropTable(ident, true); if (!dropped) { throw new NoSuchTableException("Table does not exist: %s", ident); } } public void tableExists(Catalog 
catalog, TableIdentifier ident) { boolean exists = catalog.tableExists(ident); if (!exists) { throw new NoSuchTableException("Table does not exist: %s", ident); } } public LoadTableResponse loadTable(Catalog catalog, TableIdentifier ident) { Table table = catalog.loadTable(ident); if (table instanceof BaseTable baseTable) { return LoadTableResponse.builder() .withTableMetadata(baseTable.operations().current()) .build(); } else if (table instanceof BaseMetadataTable) { // metadata tables are loaded on the client side, return NoSuchTableException for now throw new NoSuchTableException("Table does not exist: %s", ident.toString()); } throw new IllegalStateException("Cannot wrap catalog that does not produce BaseTable"); } public LoadTableResponse updateTable( Catalog catalog, TableIdentifier ident, UpdateTableRequest request) { TableMetadata finalMetadata; if (isCreate(request)) { // this is a hacky way to get TableOperations for an uncommitted table Transaction transaction = catalog.buildTable(ident, EMPTY_SCHEMA).createOrReplaceTransaction(); if (transaction instanceof BaseTransaction baseTransaction) { finalMetadata = create(baseTransaction.underlyingOps(), request); } else { throw new IllegalStateException( "Cannot wrap catalog that does not produce BaseTransaction"); } } else { Table table = catalog.loadTable(ident); if (table instanceof BaseTable baseTable) { TableOperations ops = baseTable.operations(); finalMetadata = commit(ops, request); } else { throw new IllegalStateException("Cannot wrap catalog that does not produce BaseTable"); } } return LoadTableResponse.builder().withTableMetadata(finalMetadata).build(); } public void renameTable(Catalog catalog, RenameTableRequest request) { catalog.renameTable(request.source(), request.destination()); } private boolean isCreate(UpdateTableRequest request) { boolean isCreate = request.requirements().stream() .anyMatch(UpdateRequirement.AssertTableDoesNotExist.class::isInstance); if (isCreate) { 
List<UpdateRequirement> invalidRequirements = request.requirements().stream() .filter(req -> !(req instanceof UpdateRequirement.AssertTableDoesNotExist)) .collect(Collectors.toList()); Preconditions.checkArgument( invalidRequirements.isEmpty(), "Invalid create requirements: %s", invalidRequirements); } return isCreate; } private TableMetadata create(TableOperations ops, UpdateTableRequest request) { // the only valid requirement is that the table will be created request.requirements().forEach(requirement -> requirement.validate(ops.current())); Optional<Integer> formatVersion = request.updates().stream() .filter(update -> update instanceof UpgradeFormatVersion) .map(update -> ((UpgradeFormatVersion) update).formatVersion()) .findFirst(); TableMetadata.Builder builder = formatVersion.map(TableMetadata::buildFromEmpty).orElseGet(TableMetadata::buildFromEmpty); request.updates().forEach(update -> update.applyTo(builder)); // create transactions do not retry. if the table exists, retrying is not a solution ops.commit(null, builder.build()); return ops.current(); } @VisibleForTesting public TableMetadata commit(TableOperations ops, UpdateTableRequest request) { AtomicBoolean isRetry = new AtomicBoolean(false); try { Tasks.foreach(ops) .retry(maxCommitRetries) .exponentialBackoff( COMMIT_MIN_RETRY_WAIT_MS_DEFAULT, COMMIT_MAX_RETRY_WAIT_MS_DEFAULT, COMMIT_TOTAL_RETRY_TIME_MS_DEFAULT, 2.0 /* exponential */) .onlyRetryOn(CommitFailedException.class) .run( taskOps -> { TableMetadata base = isRetry.get() ? 
taskOps.refresh() : taskOps.current(); TableMetadata.Builder metadataBuilder = TableMetadata.buildFrom(base); TableMetadata newBase = base; try { request.requirements().forEach((requirement) -> requirement.validate(base)); } catch (CommitFailedException e) { if (!rollbackCompactionEnabled) { // wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } LOGGER.debug( "Attempting to Rollback replace operations for table={}, with current-snapshot-id={}", base.uuid(), base.currentSnapshot().snapshotId()); UpdateRequirement.AssertRefSnapshotID assertRefSnapshotId = findAssertRefSnapshotID(request); MetadataUpdate.SetSnapshotRef setSnapshotRef = findSetSnapshotRefUpdate(request); if (assertRefSnapshotId == null || setSnapshotRef == null) { // This implies the request was not trying to add a snapshot. LOGGER.debug( "Giving up on Rollback replace operations for table={}, with current-snapshot-id={}, as operation doesn't attempts to add a single snapshot", base.uuid(), base.currentSnapshot().snapshotId()); // wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } // snapshot-id the client expects the table current_snapshot_id long expectedCurrentSnapshotId = assertRefSnapshotId.snapshotId(); MetadataUpdate.AddSnapshot snapshotToBeAdded = findAddSnapshotUpdate(request); if (snapshotToBeAdded == null) { // Re-throw if, there's no snapshot data to be added. 
// wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } LOGGER.info( "Attempting to Rollback replace operation for table={}, with current-snapshot-id={}, to snapshot={}", base.uuid(), base.currentSnapshot().snapshotId(), snapshotToBeAdded.snapshot().snapshotId()); List<MetadataUpdate> metadataUpdates = generateUpdatesToRemoveNoopSnapshot( base, expectedCurrentSnapshotId, setSnapshotRef.name()); if (metadataUpdates == null || metadataUpdates.isEmpty()) { // Nothing can be done as this implies that there were not all // No-op snapshots (REPLACE) between expectedCurrentSnapshotId and // currentSnapshotId. hence re-throw the exception caught. // wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } // Set back the ref we wanted to set, back to the snapshot-id // the client is expecting the table to be at. metadataBuilder.setBranchSnapshot( expectedCurrentSnapshotId, setSnapshotRef.name()); // apply the remove snapshots update in the current metadata. // NOTE: we need to setRef to expectedCurrentSnapshotId first and then apply // remove, as otherwise the remove will drop the reference. // NOTE: we can skip removing the now orphan base. It's not a hard requirement. // just something good to do, and not leave for Remove Orphans. // Ref rolled back update correctly to snapshot to be committed parent now. metadataUpdates.forEach((update -> update.applyTo(metadataBuilder))); newBase = setAppropriateLastSeqNumber( metadataBuilder, base.uuid(), base.lastSequenceNumber(), base.snapshot(expectedCurrentSnapshotId).sequenceNumber()) .build(); LOGGER.info( "Successfully roll-backed replace operation for table={}, with current-snapshot-id={}, to snapshot={}", base.uuid(), base.currentSnapshot().snapshotId(), newBase.currentSnapshot().snapshotId()); } // double check if the requirements passes now. 
try { TableMetadata baseWithRemovedSnaps = newBase; request .requirements() .forEach((requirement) -> requirement.validate(baseWithRemovedSnaps)); } catch (CommitFailedException e) { // wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } TableMetadata.Builder newMetadataBuilder = TableMetadata.buildFrom(newBase); request.updates().forEach((update) -> update.applyTo(newMetadataBuilder)); TableMetadata updated = newMetadataBuilder.build(); if (updated.changes().isEmpty()) { // do not commit if the metadata has not changed return; } taskOps.commit(base, updated); }); } catch (ValidationFailureException e) { throw e.wrapped(); } return ops.current(); } private UpdateRequirement.AssertRefSnapshotID findAssertRefSnapshotID( UpdateTableRequest request) { UpdateRequirement.AssertRefSnapshotID assertRefSnapshotID = null; int total = 0; for (UpdateRequirement requirement : request.requirements()) { if (requirement instanceof UpdateRequirement.AssertRefSnapshotID assertRefSnapshotIDReq) { ++total; assertRefSnapshotID = assertRefSnapshotIDReq; } } // if > 1 assertion for refs, then it's not safe to roll back, make this Noop. return total != 1 ? null : assertRefSnapshotID; } private List<MetadataUpdate> generateUpdatesToRemoveNoopSnapshot( TableMetadata base, long expectedCurrentSnapshotId, String updateRefName) { // find the all the snapshots we want to retain which are not the part of current branch. Set<Long> idsToRetain = Sets.newHashSet(); for (Map.Entry<String, SnapshotRef> ref : base.refs().entrySet()) { String refName = ref.getKey(); SnapshotRef snapshotRef = ref.getValue(); if (refName.equals(updateRefName)) { continue; } idsToRetain.add(ref.getValue().snapshotId()); // Always check the ancestry for both branch and tags // mostly for case where a branch was created and then was dropped // then a tag was created and then rollback happened post that tag // was dropped and branch was re-created on it. 
for (Snapshot ancestor : SnapshotUtil.ancestorsOf(snapshotRef.snapshotId(), base::snapshot)) { idsToRetain.add(ancestor.snapshotId()); } } List<MetadataUpdate> updateToRemoveSnapshot = new ArrayList<>(); Long snapshotId = base.ref(updateRefName).snapshotId(); // current tip of the given branch // ensure this branch has the latest sequence number. long expectedSequenceNumber = base.lastSequenceNumber(); // Unexpected state as table's current sequence number is not equal to the // most recent snapshot the ref points to. if (expectedSequenceNumber != base.snapshot(snapshotId).sequenceNumber()) { LOGGER.debug( "Giving up rolling back table {} to snapshot {}, ref current snapshot sequence number {} is not equal expected sequence number {}", base.uuid(), snapshotId, base.snapshot(snapshotId).sequenceNumber(), expectedSequenceNumber); return null; } Set<Long> snapshotsToRemove = new LinkedHashSet<>(); while (snapshotId != null && !Objects.equals(snapshotId, expectedCurrentSnapshotId)) { Snapshot snap = base.snapshot(snapshotId); if (!isRollbackSnapshot(snap) || idsToRetain.contains(snapshotId)) { // Either encountered a non no-op snapshot or the snapshot is being referenced by any other // reference either by branch or a tag. LOGGER.debug( "Giving up rolling back table {} to snapshot {}, snapshot to be removed referenced by another branch or tag ancestor", base.uuid(), snapshotId); break; } snapshotsToRemove.add(snap.snapshotId()); snapshotId = snap.parentId(); } boolean wasExpectedSnapshotReached = Objects.equals(snapshotId, expectedCurrentSnapshotId); updateToRemoveSnapshot.add(new MetadataUpdate.RemoveSnapshots(snapshotsToRemove)); return wasExpectedSnapshotReached ? updateToRemoveSnapshot : null; } private boolean isRollbackSnapshot(Snapshot snapshot) { // Only Snapshots with {@ROLLBACKABLE_REPLACE_SNAPSHOT} are allowed to be rollback. 
return DataOperations.REPLACE.equals(snapshot.operation()) && PropertyUtil.propertyAsString(snapshot.summary(), CONFLICT_RESOLUTION_ACTION, "") .equalsIgnoreCase("rollback"); } private MetadataUpdate.SetSnapshotRef findSetSnapshotRefUpdate(UpdateTableRequest request) { int total = 0; MetadataUpdate.SetSnapshotRef setSnapshotRefUpdate = null; // find the SetRefName snapshot update for (MetadataUpdate update : request.updates()) { if (update instanceof MetadataUpdate.SetSnapshotRef setSnapshotRefUpd) { total++; setSnapshotRefUpdate = setSnapshotRefUpd; } } // if > 1 assertion for refs, then it's not safe to rollback, make this Noop. return total != 1 ? null : setSnapshotRefUpdate; } private MetadataUpdate.AddSnapshot findAddSnapshotUpdate(UpdateTableRequest request) { int total = 0; MetadataUpdate.AddSnapshot addSnapshot = null; // find the SetRefName snapshot update for (MetadataUpdate update : request.updates()) { if (update instanceof MetadataUpdate.AddSnapshot addSnapshotUpd) { total++; addSnapshot = addSnapshotUpd; } } // if > 1 assertion for addSnapshot, then it's not safe to rollback, make this Noop. return total != 1 ? null : addSnapshot; } private TableMetadata.Builder setAppropriateLastSeqNumber( TableMetadata.Builder metadataBuilder, String tableUUID, long currentSequenceNumber, long expectedSequenceNumber) { // TODO: Get rid of the reflection call once TableMetadata have API for it. // move the lastSequenceNumber back, to apply snapshot properly on the // current-metadata Seq number are considered increasing monotonically // snapshot over snapshot, the client generates the manifest list and hence // the sequence number can't be changed for a snapshot the only possible option // then is to change the sequenceNumber tracked by metadata.json try { // this should point to the sequence number that current tip of the // branch belongs to, as the new commit will be applied on top of this. 
LAST_SEQUENCE_NUMBER_FIELD.set(metadataBuilder, expectedSequenceNumber); LOGGER.info( "Setting table uuid:{} last sequence number from:{} to {}", tableUUID, currentSequenceNumber, expectedSequenceNumber); } catch (IllegalAccessException ex) { throw new RuntimeException(ex); } return metadataBuilder; } private BaseView asBaseView(View view) { Preconditions.checkState( view instanceof BaseView, "Cannot wrap catalog that does not produce BaseView"); return (BaseView) view; } public ListTablesResponse listViews(ViewCatalog catalog, Namespace namespace) { return ListTablesResponse.builder().addAll(catalog.listViews(namespace)).build(); } public ListTablesResponse listViews( ViewCatalog catalog, Namespace namespace, String pageToken, Integer pageSize) { List<TableIdentifier> results = catalog.listViews(namespace); Pair<List<TableIdentifier>, String> page = paginate(results, pageToken, pageSize); return ListTablesResponse.builder().addAll(page.first()).nextPageToken(page.second()).build(); } public LoadViewResponse createView( ViewCatalog catalog, Namespace namespace, CreateViewRequest request) { request.validate(); ViewBuilder viewBuilder = catalog .buildView(TableIdentifier.of(namespace, request.name())) .withSchema(request.schema()) .withProperties(request.properties()) .withDefaultNamespace(request.viewVersion().defaultNamespace()) .withDefaultCatalog(request.viewVersion().defaultCatalog()) .withLocation(request.location()); Set<String> unsupportedRepresentations = request.viewVersion().representations().stream() .filter(r -> !(r instanceof SQLViewRepresentation)) .map(ViewRepresentation::type) .collect(Collectors.toSet()); if (!unsupportedRepresentations.isEmpty()) { throw new IllegalStateException( String.format("Found unsupported view representations: %s", unsupportedRepresentations)); } request.viewVersion().representations().stream() .filter(SQLViewRepresentation.class::isInstance) .map(SQLViewRepresentation.class::cast) .forEach(r -> 
viewBuilder.withQuery(r.dialect(), r.sql())); View view = viewBuilder.create(); return viewResponse(view); } private LoadViewResponse viewResponse(View view) { ViewMetadata metadata = asBaseView(view).operations().current(); return ImmutableLoadViewResponse.builder() .metadata(metadata) .metadataLocation(metadata.metadataFileLocation()) .build(); } public void viewExists(ViewCatalog catalog, TableIdentifier viewIdentifier) { if (!catalog.viewExists(viewIdentifier)) { throw new NoSuchViewException("View does not exist: %s", viewIdentifier); } } public LoadViewResponse loadView(ViewCatalog catalog, TableIdentifier viewIdentifier) { View view = catalog.loadView(viewIdentifier); return viewResponse(view); } public LoadViewResponse updateView( ViewCatalog catalog, TableIdentifier ident, UpdateTableRequest request) { View view = catalog.loadView(ident); ViewMetadata metadata = commit(asBaseView(view).operations(), request); return ImmutableLoadViewResponse.builder() .metadata(metadata) .metadataLocation(metadata.metadataFileLocation()) .build(); } public void renameView(ViewCatalog catalog, RenameTableRequest request) { catalog.renameView(request.source(), request.destination()); } public void dropView(ViewCatalog catalog, TableIdentifier viewIdentifier) { boolean dropped = catalog.dropView(viewIdentifier); if (!dropped) { throw new NoSuchViewException("View does not exist: %s", viewIdentifier); } } protected ViewMetadata commit(ViewOperations ops, UpdateTableRequest request) { AtomicBoolean isRetry = new AtomicBoolean(false); try { Tasks.foreach(ops) .retry(maxCommitRetries) .exponentialBackoff( COMMIT_MIN_RETRY_WAIT_MS_DEFAULT, COMMIT_MAX_RETRY_WAIT_MS_DEFAULT, COMMIT_TOTAL_RETRY_TIME_MS_DEFAULT, 2.0 /* exponential */) .onlyRetryOn(CommitFailedException.class) .run( taskOps -> { ViewMetadata base = isRetry.get() ? 
taskOps.refresh() : taskOps.current(); isRetry.set(true); // validate requirements try { request.requirements().forEach(requirement -> requirement.validate(base)); } catch (CommitFailedException e) { // wrap and rethrow outside of tasks to avoid unnecessary retry throw new ValidationFailureException(e); } // apply changes ViewMetadata.Builder metadataBuilder = ViewMetadata.buildFrom(base); request.updates().forEach(update -> update.applyTo(metadataBuilder)); ViewMetadata updated = metadataBuilder.build(); if (updated.changes().isEmpty()) { // do not commit if the metadata has not changed return; } // commit taskOps.commit(base, updated); }); } catch (ValidationFailureException e) { throw e.wrapped(); } return ops.current(); } }
googleapis/google-cloud-java
35,347
java-meet/proto-google-cloud-meet-v2beta/src/main/java/com/google/apps/meet/v2beta/UpdateSpaceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/apps/meet/v2beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.apps.meet.v2beta; /** * * * <pre> * Request to update a space. * </pre> * * Protobuf type {@code google.apps.meet.v2beta.UpdateSpaceRequest} */ public final class UpdateSpaceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.apps.meet.v2beta.UpdateSpaceRequest) UpdateSpaceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateSpaceRequest.newBuilder() to construct. 
private UpdateSpaceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateSpaceRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateSpaceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_UpdateSpaceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_UpdateSpaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.UpdateSpaceRequest.class, com.google.apps.meet.v2beta.UpdateSpaceRequest.Builder.class); } private int bitField0_; public static final int SPACE_FIELD_NUMBER = 1; private com.google.apps.meet.v2beta.Space space_; /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the space field is set. */ @java.lang.Override public boolean hasSpace() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The space. */ @java.lang.Override public com.google.apps.meet.v2beta.Space getSpace() { return space_ == null ? com.google.apps.meet.v2beta.Space.getDefaultInstance() : space_; } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.apps.meet.v2beta.SpaceOrBuilder getSpaceOrBuilder() { return space_ == null ? 
com.google.apps.meet.v2beta.Space.getDefaultInstance() : space_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSpace()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSpace()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.apps.meet.v2beta.UpdateSpaceRequest)) { return super.equals(obj); } com.google.apps.meet.v2beta.UpdateSpaceRequest other = (com.google.apps.meet.v2beta.UpdateSpaceRequest) obj; if (hasSpace() != other.hasSpace()) return false; if (hasSpace()) { if (!getSpace().equals(other.getSpace())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; 
return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSpace()) { hash = (37 * hash) + SPACE_FIELD_NUMBER; hash = (53 * hash) + getSpace().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom(java.io.InputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.apps.meet.v2beta.UpdateSpaceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update a space. * </pre> * * Protobuf type {@code google.apps.meet.v2beta.UpdateSpaceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.apps.meet.v2beta.UpdateSpaceRequest) com.google.apps.meet.v2beta.UpdateSpaceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_UpdateSpaceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_UpdateSpaceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.apps.meet.v2beta.UpdateSpaceRequest.class, com.google.apps.meet.v2beta.UpdateSpaceRequest.Builder.class); } // Construct using com.google.apps.meet.v2beta.UpdateSpaceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSpaceFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; space_ = null; if (spaceBuilder_ != null) { spaceBuilder_.dispose(); spaceBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.apps.meet.v2beta.ServiceProto .internal_static_google_apps_meet_v2beta_UpdateSpaceRequest_descriptor; } @java.lang.Override public com.google.apps.meet.v2beta.UpdateSpaceRequest getDefaultInstanceForType() { return com.google.apps.meet.v2beta.UpdateSpaceRequest.getDefaultInstance(); } @java.lang.Override public com.google.apps.meet.v2beta.UpdateSpaceRequest build() { com.google.apps.meet.v2beta.UpdateSpaceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.apps.meet.v2beta.UpdateSpaceRequest buildPartial() { com.google.apps.meet.v2beta.UpdateSpaceRequest result = new com.google.apps.meet.v2beta.UpdateSpaceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.apps.meet.v2beta.UpdateSpaceRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.space_ = spaceBuilder_ == null ? space_ : spaceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.apps.meet.v2beta.UpdateSpaceRequest) { return mergeFrom((com.google.apps.meet.v2beta.UpdateSpaceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.apps.meet.v2beta.UpdateSpaceRequest other) { if (other == com.google.apps.meet.v2beta.UpdateSpaceRequest.getDefaultInstance()) return this; if (other.hasSpace()) { mergeSpace(other.getSpace()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { 
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getSpaceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.apps.meet.v2beta.Space space_; private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.meet.v2beta.Space, com.google.apps.meet.v2beta.Space.Builder, com.google.apps.meet.v2beta.SpaceOrBuilder> spaceBuilder_; /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the space field is set. */ public boolean hasSpace() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The space. */ public com.google.apps.meet.v2beta.Space getSpace() { if (spaceBuilder_ == null) { return space_ == null ? com.google.apps.meet.v2beta.Space.getDefaultInstance() : space_; } else { return spaceBuilder_.getMessage(); } } /** * * * <pre> * Required. Space to be updated. 
* </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSpace(com.google.apps.meet.v2beta.Space value) { if (spaceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } space_ = value; } else { spaceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSpace(com.google.apps.meet.v2beta.Space.Builder builderForValue) { if (spaceBuilder_ == null) { space_ = builderForValue.build(); } else { spaceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSpace(com.google.apps.meet.v2beta.Space value) { if (spaceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && space_ != null && space_ != com.google.apps.meet.v2beta.Space.getDefaultInstance()) { getSpaceBuilder().mergeFrom(value); } else { space_ = value; } } else { spaceBuilder_.mergeFrom(value); } if (space_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSpace() { bitField0_ = (bitField0_ & ~0x00000001); space_ = null; if (spaceBuilder_ != null) { spaceBuilder_.dispose(); spaceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. Space to be updated. 
* </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.apps.meet.v2beta.Space.Builder getSpaceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSpaceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.apps.meet.v2beta.SpaceOrBuilder getSpaceOrBuilder() { if (spaceBuilder_ != null) { return spaceBuilder_.getMessageOrBuilder(); } else { return space_ == null ? com.google.apps.meet.v2beta.Space.getDefaultInstance() : space_; } } /** * * * <pre> * Required. Space to be updated. * </pre> * * <code>.google.apps.meet.v2beta.Space space = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.apps.meet.v2beta.Space, com.google.apps.meet.v2beta.Space.Builder, com.google.apps.meet.v2beta.SpaceOrBuilder> getSpaceFieldBuilder() { if (spaceBuilder_ == null) { spaceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.apps.meet.v2beta.Space, com.google.apps.meet.v2beta.Space.Builder, com.google.apps.meet.v2beta.SpaceOrBuilder>( getSpace(), getParentForChildren(), isClean()); space_ = null; } return spaceBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. 
* If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. * Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Field mask used to specify the fields to be updated in the space. * If update_mask isn't provided(not set, set with empty paths, or only has "" * as paths), it defaults to update all fields provided with values in the * request. 
* Using "*" as update_mask will update all fields, including deleting fields * not set in the request. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.apps.meet.v2beta.UpdateSpaceRequest) } // @@protoc_insertion_point(class_scope:google.apps.meet.v2beta.UpdateSpaceRequest) private static final com.google.apps.meet.v2beta.UpdateSpaceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.apps.meet.v2beta.UpdateSpaceRequest(); } public static com.google.apps.meet.v2beta.UpdateSpaceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateSpaceRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateSpaceRequest>() { @java.lang.Override public UpdateSpaceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateSpaceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateSpaceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.apps.meet.v2beta.UpdateSpaceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,586
java-dialogflow-cx/google-cloud-dialogflow-cx/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/stub/PlaybooksStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.dialogflow.cx.v3beta1.stub; import static com.google.cloud.dialogflow.cx.v3beta1.PlaybooksClient.ListLocationsPagedResponse; import static com.google.cloud.dialogflow.cx.v3beta1.PlaybooksClient.ListPlaybookVersionsPagedResponse; import static com.google.cloud.dialogflow.cx.v3beta1.PlaybooksClient.ListPlaybooksPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import 
com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.dialogflow.cx.v3beta1.CreatePlaybookRequest; import com.google.cloud.dialogflow.cx.v3beta1.CreatePlaybookVersionRequest; import com.google.cloud.dialogflow.cx.v3beta1.DeletePlaybookRequest; import com.google.cloud.dialogflow.cx.v3beta1.DeletePlaybookVersionRequest; import com.google.cloud.dialogflow.cx.v3beta1.GetPlaybookRequest; import com.google.cloud.dialogflow.cx.v3beta1.GetPlaybookVersionRequest; import com.google.cloud.dialogflow.cx.v3beta1.ListPlaybookVersionsRequest; import com.google.cloud.dialogflow.cx.v3beta1.ListPlaybookVersionsResponse; import com.google.cloud.dialogflow.cx.v3beta1.ListPlaybooksRequest; import com.google.cloud.dialogflow.cx.v3beta1.ListPlaybooksResponse; import com.google.cloud.dialogflow.cx.v3beta1.Playbook; import com.google.cloud.dialogflow.cx.v3beta1.PlaybookVersion; import com.google.cloud.dialogflow.cx.v3beta1.UpdatePlaybookRequest; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link PlaybooksStub}. 
* * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (dialogflow.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of createPlaybook: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * PlaybooksStubSettings.Builder playbooksSettingsBuilder = PlaybooksStubSettings.newBuilder(); * playbooksSettingsBuilder * .createPlaybookSettings() * .setRetrySettings( * playbooksSettingsBuilder * .createPlaybookSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * PlaybooksStubSettings playbooksSettings = playbooksSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional 
support in setting retries. */
@BetaApi
@Generated("by gapic-generator-java")
public class PlaybooksStubSettings extends StubSettings<PlaybooksStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/dialogflow")
          .build();

  // Immutable per-RPC call settings. Each field is frozen from the corresponding Builder
  // field when PlaybooksStubSettings(Builder) runs (see the constructor below).
  private final UnaryCallSettings<CreatePlaybookRequest, Playbook> createPlaybookSettings;
  private final UnaryCallSettings<DeletePlaybookRequest, Empty> deletePlaybookSettings;
  private final PagedCallSettings<
          ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>
      listPlaybooksSettings;
  private final UnaryCallSettings<GetPlaybookRequest, Playbook> getPlaybookSettings;
  private final UnaryCallSettings<UpdatePlaybookRequest, Playbook> updatePlaybookSettings;
  private final UnaryCallSettings<CreatePlaybookVersionRequest, PlaybookVersion>
      createPlaybookVersionSettings;
  private final UnaryCallSettings<GetPlaybookVersionRequest, PlaybookVersion>
      getPlaybookVersionSettings;
  private final PagedCallSettings<
          ListPlaybookVersionsRequest,
          ListPlaybookVersionsResponse,
          ListPlaybookVersionsPagedResponse>
      listPlaybookVersionsSettings;
  private final UnaryCallSettings<DeletePlaybookVersionRequest, Empty>
      deletePlaybookVersionSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;

  // Page descriptor for ListPlaybooks: tells gax how to inject page tokens/sizes into
  // requests and how to pull the next token and the resource list out of responses.
  private static final PagedListDescriptor<ListPlaybooksRequest, ListPlaybooksResponse, Playbook>
      LIST_PLAYBOOKS_PAGE_STR_DESC =
          new PagedListDescriptor<ListPlaybooksRequest, ListPlaybooksResponse, Playbook>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListPlaybooksRequest injectToken(ListPlaybooksRequest payload, String token) {
              return ListPlaybooksRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListPlaybooksRequest injectPageSize(ListPlaybooksRequest payload, int pageSize) {
              return ListPlaybooksRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListPlaybooksRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListPlaybooksResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Playbook> extractResources(ListPlaybooksResponse payload) {
              return payload.getPlaybooksList();
            }
          };

  // Page descriptor for ListPlaybookVersions (same pattern as above).
  private static final PagedListDescriptor<
          ListPlaybookVersionsRequest, ListPlaybookVersionsResponse, PlaybookVersion>
      LIST_PLAYBOOK_VERSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListPlaybookVersionsRequest, ListPlaybookVersionsResponse, PlaybookVersion>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListPlaybookVersionsRequest injectToken(
                ListPlaybookVersionsRequest payload, String token) {
              return ListPlaybookVersionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListPlaybookVersionsRequest injectPageSize(
                ListPlaybookVersionsRequest payload, int pageSize) {
              return ListPlaybookVersionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListPlaybookVersionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListPlaybookVersionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<PlaybookVersion> extractResources(
                ListPlaybookVersionsResponse payload) {
              return payload.getPlaybookVersionsList();
            }
          };

  // Page descriptor for the mixed-in Locations API ListLocations method.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Factories that bind a raw unary callable plus the page descriptor above into the
  // typed *PagedResponse futures exposed by PlaybooksClient.
  private static final PagedListResponseFactory<
          ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>
      LIST_PLAYBOOKS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>() {
            @Override
            public ApiFuture<ListPlaybooksPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListPlaybooksRequest, ListPlaybooksResponse> callable,
                ListPlaybooksRequest request,
                ApiCallContext context,
                ApiFuture<ListPlaybooksResponse> futureResponse) {
              PageContext<ListPlaybooksRequest, ListPlaybooksResponse, Playbook> pageContext =
                  PageContext.create(callable, LIST_PLAYBOOKS_PAGE_STR_DESC, request, context);
              return ListPlaybooksPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListPlaybookVersionsRequest,
          ListPlaybookVersionsResponse,
          ListPlaybookVersionsPagedResponse>
      LIST_PLAYBOOK_VERSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListPlaybookVersionsRequest,
              ListPlaybookVersionsResponse,
              ListPlaybookVersionsPagedResponse>() {
            @Override
            public ApiFuture<ListPlaybookVersionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListPlaybookVersionsRequest, ListPlaybookVersionsResponse> callable,
                ListPlaybookVersionsRequest request,
                ApiCallContext context,
                ApiFuture<ListPlaybookVersionsResponse> futureResponse) {
              PageContext<
                      ListPlaybookVersionsRequest, ListPlaybookVersionsResponse, PlaybookVersion>
                  pageContext =
                      PageContext.create(
                          callable, LIST_PLAYBOOK_VERSIONS_PAGE_STR_DESC, request, context);
              return ListPlaybookVersionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createPlaybook. */
  public UnaryCallSettings<CreatePlaybookRequest, Playbook> createPlaybookSettings() {
    return createPlaybookSettings;
  }

  /** Returns the object with the settings used for calls to deletePlaybook. */
  public UnaryCallSettings<DeletePlaybookRequest, Empty> deletePlaybookSettings() {
    return deletePlaybookSettings;
  }

  /** Returns the object with the settings used for calls to listPlaybooks. */
  public PagedCallSettings<ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>
      listPlaybooksSettings() {
    return listPlaybooksSettings;
  }

  /** Returns the object with the settings used for calls to getPlaybook. */
  public UnaryCallSettings<GetPlaybookRequest, Playbook> getPlaybookSettings() {
    return getPlaybookSettings;
  }

  /** Returns the object with the settings used for calls to updatePlaybook. */
  public UnaryCallSettings<UpdatePlaybookRequest, Playbook> updatePlaybookSettings() {
    return updatePlaybookSettings;
  }

  /** Returns the object with the settings used for calls to createPlaybookVersion. */
  public UnaryCallSettings<CreatePlaybookVersionRequest, PlaybookVersion>
      createPlaybookVersionSettings() {
    return createPlaybookVersionSettings;
  }

  /** Returns the object with the settings used for calls to getPlaybookVersion. */
  public UnaryCallSettings<GetPlaybookVersionRequest, PlaybookVersion>
      getPlaybookVersionSettings() {
    return getPlaybookVersionSettings;
  }

  /** Returns the object with the settings used for calls to listPlaybookVersions. */
  public PagedCallSettings<
          ListPlaybookVersionsRequest,
          ListPlaybookVersionsResponse,
          ListPlaybookVersionsPagedResponse>
      listPlaybookVersionsSettings() {
    return listPlaybookVersionsSettings;
  }

  /** Returns the object with the settings used for calls to deletePlaybookVersion. */
  public UnaryCallSettings<DeletePlaybookVersionRequest, Empty> deletePlaybookVersionSettings() {
    return deletePlaybookVersionSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  /**
   * Creates the transport-specific stub for these settings. Dispatches on the configured
   * transport channel provider's name: gRPC and HTTP/JSON (REST) are supported; anything
   * else throws {@link UnsupportedOperationException}.
   */
  public PlaybooksStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcPlaybooksStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonPlaybooksStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dialogflow";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dialogflow.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dialogflow.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  // Note: gRPC is the default transport; newHttpJsonBuilder() must be used explicitly
  // to get the REST transport.
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(PlaybooksStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(PlaybooksStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return PlaybooksStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Freezes every mutable Builder call-settings object into its immutable counterpart.
  protected PlaybooksStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    createPlaybookSettings = settingsBuilder.createPlaybookSettings().build();
    deletePlaybookSettings = settingsBuilder.deletePlaybookSettings().build();
    listPlaybooksSettings = settingsBuilder.listPlaybooksSettings().build();
    getPlaybookSettings = settingsBuilder.getPlaybookSettings().build();
    updatePlaybookSettings = settingsBuilder.updatePlaybookSettings().build();
    createPlaybookVersionSettings = settingsBuilder.createPlaybookVersionSettings().build();
    getPlaybookVersionSettings = settingsBuilder.getPlaybookVersionSettings().build();
    listPlaybookVersionsSettings = settingsBuilder.listPlaybookVersionsSettings().build();
    deletePlaybookVersionSettings = settingsBuilder.deletePlaybookVersionSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for PlaybooksStubSettings. */
  public static class Builder extends StubSettings.Builder<PlaybooksStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreatePlaybookRequest, Playbook>
        createPlaybookSettings;
    private final UnaryCallSettings.Builder<DeletePlaybookRequest, Empty> deletePlaybookSettings;
    private final PagedCallSettings.Builder<
            ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>
        listPlaybooksSettings;
    private final UnaryCallSettings.Builder<GetPlaybookRequest, Playbook> getPlaybookSettings;
    private final UnaryCallSettings.Builder<UpdatePlaybookRequest, Playbook>
        updatePlaybookSettings;
    private final UnaryCallSettings.Builder<CreatePlaybookVersionRequest, PlaybookVersion>
        createPlaybookVersionSettings;
    private final UnaryCallSettings.Builder<GetPlaybookVersionRequest, PlaybookVersion>
        getPlaybookVersionSettings;
    private final PagedCallSettings.Builder<
            ListPlaybookVersionsRequest,
            ListPlaybookVersionsResponse,
            ListPlaybookVersionsPagedResponse>
        listPlaybookVersionsSettings;
    private final UnaryCallSettings.Builder<DeletePlaybookVersionRequest, Empty>
        deletePlaybookVersionSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;

    // Named retry-code sets from the service config. "retry_policy_0_codes" retries only
    // on UNAVAILABLE, and is applied to every method in initDefaults.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry-parameter sets from the service config: exponential backoff starting at
    // 100ms (x1.3, capped at 60s) with a flat 60s RPC timeout and 60s total timeout.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      createPlaybookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deletePlaybookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listPlaybooksSettings = PagedCallSettings.newBuilder(LIST_PLAYBOOKS_PAGE_STR_FACT);
      getPlaybookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updatePlaybookSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createPlaybookVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getPlaybookVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listPlaybookVersionsSettings =
          PagedCallSettings.newBuilder(LIST_PLAYBOOK_VERSIONS_PAGE_STR_FACT);
      deletePlaybookVersionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createPlaybookSettings,
              deletePlaybookSettings,
              listPlaybooksSettings,
              getPlaybookSettings,
              updatePlaybookSettings,
              createPlaybookVersionSettings,
              getPlaybookVersionSettings,
              listPlaybookVersionsSettings,
              deletePlaybookVersionSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    protected Builder(PlaybooksStubSettings settings) {
      super(settings);

      createPlaybookSettings = settings.createPlaybookSettings.toBuilder();
      deletePlaybookSettings = settings.deletePlaybookSettings.toBuilder();
      listPlaybooksSettings = settings.listPlaybooksSettings.toBuilder();
      getPlaybookSettings = settings.getPlaybookSettings.toBuilder();
      updatePlaybookSettings = settings.updatePlaybookSettings.toBuilder();
      createPlaybookVersionSettings = settings.createPlaybookVersionSettings.toBuilder();
      getPlaybookVersionSettings = settings.getPlaybookVersionSettings.toBuilder();
      listPlaybookVersionsSettings = settings.listPlaybookVersionsSettings.toBuilder();
      deletePlaybookVersionSettings = settings.deletePlaybookVersionSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createPlaybookSettings,
              deletePlaybookSettings,
              listPlaybooksSettings,
              getPlaybookSettings,
              updatePlaybookSettings,
              createPlaybookVersionSettings,
              getPlaybookVersionSettings,
              listPlaybookVersionsSettings,
              deletePlaybookVersionSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the shared retry policy (retry on UNAVAILABLE, 60s timeouts) to every method.
    private static Builder initDefaults(Builder builder) {
      builder
          .createPlaybookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deletePlaybookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listPlaybooksSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getPlaybookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .updatePlaybookSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createPlaybookVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getPlaybookVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listPlaybookVersionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deletePlaybookVersionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createPlaybook. */
    public UnaryCallSettings.Builder<CreatePlaybookRequest, Playbook> createPlaybookSettings() {
      return createPlaybookSettings;
    }

    /** Returns the builder for the settings used for calls to deletePlaybook. */
    public UnaryCallSettings.Builder<DeletePlaybookRequest, Empty> deletePlaybookSettings() {
      return deletePlaybookSettings;
    }

    /** Returns the builder for the settings used for calls to listPlaybooks. */
    public PagedCallSettings.Builder<
            ListPlaybooksRequest, ListPlaybooksResponse, ListPlaybooksPagedResponse>
        listPlaybooksSettings() {
      return listPlaybooksSettings;
    }

    /** Returns the builder for the settings used for calls to getPlaybook. */
    public UnaryCallSettings.Builder<GetPlaybookRequest, Playbook> getPlaybookSettings() {
      return getPlaybookSettings;
    }

    /** Returns the builder for the settings used for calls to updatePlaybook. */
    public UnaryCallSettings.Builder<UpdatePlaybookRequest, Playbook> updatePlaybookSettings() {
      return updatePlaybookSettings;
    }

    /** Returns the builder for the settings used for calls to createPlaybookVersion. */
    public UnaryCallSettings.Builder<CreatePlaybookVersionRequest, PlaybookVersion>
        createPlaybookVersionSettings() {
      return createPlaybookVersionSettings;
    }

    /** Returns the builder for the settings used for calls to getPlaybookVersion. */
    public UnaryCallSettings.Builder<GetPlaybookVersionRequest, PlaybookVersion>
        getPlaybookVersionSettings() {
      return getPlaybookVersionSettings;
    }

    /** Returns the builder for the settings used for calls to listPlaybookVersions. */
    public PagedCallSettings.Builder<
            ListPlaybookVersionsRequest,
            ListPlaybookVersionsResponse,
            ListPlaybookVersionsPagedResponse>
        listPlaybookVersionsSettings() {
      return listPlaybookVersionsSettings;
    }

    /** Returns the builder for the settings used for calls to deletePlaybookVersion. */
    public UnaryCallSettings.Builder<DeletePlaybookVersionRequest, Empty>
        deletePlaybookVersionSettings() {
      return deletePlaybookVersionSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public PlaybooksStubSettings build() throws IOException {
      return new PlaybooksStubSettings(this);
    }
  }
}
apache/doris-flink-connector
35,466
flink-doris-connector/src/main/java/org/apache/doris/flink/rest/RestService.java
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.apache.doris.flink.rest; import org.apache.flink.annotation.VisibleForTesting; import org.apache.flink.util.Preconditions; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.doris.flink.cfg.ConfigurationOptions; import org.apache.doris.flink.cfg.DorisOptions; import org.apache.doris.flink.cfg.DorisReadOptions; import org.apache.doris.flink.exception.ConnectedFailedException; import org.apache.doris.flink.exception.DorisException; import org.apache.doris.flink.exception.DorisRuntimeException; import org.apache.doris.flink.exception.DorisSchemaChangeException; import org.apache.doris.flink.exception.IllegalArgumentException; import org.apache.doris.flink.exception.ShouldNeverHappenException; import org.apache.doris.flink.rest.models.BackendV2; import org.apache.doris.flink.rest.models.BackendV2.BackendRowV2; import 
org.apache.doris.flink.rest.models.QueryPlan; import org.apache.doris.flink.rest.models.Schema; import org.apache.doris.flink.rest.models.Tablet; import org.apache.doris.flink.sink.BackendUtil; import org.apache.doris.flink.sink.HttpGetWithEntity; import org.apache.http.HttpHeaders; import org.apache.http.HttpStatus; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.client.methods.HttpUriRequest; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.slf4j.Logger; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; import java.io.Serializable; import java.net.HttpURLConnection; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Scanner; import java.util.Set; import java.util.stream.Collectors; import static org.apache.doris.flink.cfg.ConfigurationOptions.DORIS_TABLET_SIZE; import static org.apache.doris.flink.cfg.ConfigurationOptions.DORIS_TABLET_SIZE_DEFAULT; import static org.apache.doris.flink.cfg.ConfigurationOptions.DORIS_TABLET_SIZE_MIN; import static org.apache.doris.flink.util.ErrorMessages.CONNECT_FAILED_MESSAGE; import static org.apache.doris.flink.util.ErrorMessages.ILLEGAL_ARGUMENT_MESSAGE; import static org.apache.doris.flink.util.ErrorMessages.SHOULD_NOT_HAPPEN_MESSAGE; /** Service for communicate with Doris FE. 
*/ public class RestService implements Serializable { public static final int REST_RESPONSE_STATUS_OK = 200; public static final int REST_RESPONSE_CODE_OK = 0; private static final String REST_RESPONSE_BE_ROWS_KEY = "rows"; private static final String UNIQUE_KEYS_TYPE = "UNIQUE_KEYS"; @Deprecated private static final String BACKENDS = "/rest/v1/system?path=//backends"; private static final String BACKENDS_V2 = "/api/backends?is_alive=true"; private static final String FE_LOGIN = "/rest/v1/login"; private static final ObjectMapper objectMapper = new ObjectMapper(); private static final String TABLE_SCHEMA_API = "http://%s/api/%s/%s/_schema"; private static final String CATALOG_TABLE_SCHEMA_API = "http://%s/api/%s/%s/%s/_schema"; private static final String QUERY_PLAN_API = "http://%s/api/%s/%s/_query_plan"; private static final String STATEMENT_EXEC_API = "http://%s/api/query/default_cluster/information_schema"; /** * send request to Doris FE and get response json string. * * @param options configuration of request * @param request {@link HttpRequestBase} real request * @param logger {@link Logger} * @return Doris FE response in json string * @throws ConnectedFailedException throw when cannot connect to Doris FE */ private static String send( DorisOptions options, DorisReadOptions readOptions, HttpRequestBase request, Logger logger) throws ConnectedFailedException { int connectTimeout = readOptions.getRequestConnectTimeoutMs() == null ? ConfigurationOptions.DORIS_REQUEST_CONNECT_TIMEOUT_MS_DEFAULT : readOptions.getRequestConnectTimeoutMs(); int socketTimeout = readOptions.getRequestReadTimeoutMs() == null ? ConfigurationOptions.DORIS_REQUEST_READ_TIMEOUT_MS_DEFAULT : readOptions.getRequestReadTimeoutMs(); int retries = readOptions.getRequestRetries() == null ? ConfigurationOptions.DORIS_REQUEST_RETRIES_DEFAULT : readOptions.getRequestRetries(); logger.trace( "connect timeout set to '{}'. socket timeout set to '{}'. 
retries set to '{}'.", connectTimeout, socketTimeout, retries); RequestConfig requestConfig = RequestConfig.custom() .setConnectTimeout(connectTimeout) .setSocketTimeout(socketTimeout) .build(); request.setConfig(requestConfig); logger.info( "Send request to Doris FE '{}' with user '{}'.", request.getURI(), options.getUsername()); IOException ex = null; int statusCode = -1; for (int attempt = 0; attempt < retries; attempt++) { logger.debug("Attempt {} to request {}.", attempt, request.getURI()); try { String response; if (request instanceof HttpGet) { response = getConnectionGet(request, options, logger); } else { response = getConnectionPost(request, options, logger); } if (response == null) { logger.warn( "Failed to get response from Doris FE {}, http code is {}", request.getURI(), statusCode); continue; } logger.trace( "Success get response from Doris FE: {}, response is: {}.", request.getURI(), response); // Handle the problem of inconsistent data format returned by http v1 and v2 ObjectMapper mapper = new ObjectMapper(); Map map = mapper.readValue(response, Map.class); if (map.containsKey("code") && map.containsKey("msg")) { Object data = map.get("data"); return mapper.writeValueAsString(data); } else { return response; } } catch (IOException e) { ex = e; logger.warn(CONNECT_FAILED_MESSAGE, request.getURI(), e); } } logger.error(CONNECT_FAILED_MESSAGE, request.getURI(), ex); throw new ConnectedFailedException(request.getURI().toString(), statusCode, ex); } private static String getConnectionPost( HttpRequestBase request, DorisOptions dorisOptions, Logger logger) throws IOException { URL url = new URL(request.getURI().toString()); HttpURLConnection conn = (HttpURLConnection) url.openConnection(); conn.setInstanceFollowRedirects(false); conn.setRequestMethod(request.getMethod()); conn.setRequestProperty("Authorization", authHeader(dorisOptions)); InputStream content = ((HttpPost) request).getEntity().getContent(); String res = IOUtils.toString(content); 
conn.setDoOutput(true); conn.setDoInput(true); conn.setConnectTimeout(request.getConfig().getConnectTimeout()); conn.setReadTimeout(request.getConfig().getSocketTimeout()); PrintWriter out = new PrintWriter(conn.getOutputStream()); // send request params out.print(res); // flush out.flush(); // read response return parseResponse(conn, logger); } private static String getConnectionGet( HttpRequestBase request, DorisOptions dorisOptions, Logger logger) throws IOException { URL realUrl = new URL(request.getURI().toString()); // open connection HttpURLConnection connection = (HttpURLConnection) realUrl.openConnection(); connection.setRequestProperty("Authorization", authHeader(dorisOptions)); connection.setConnectTimeout(request.getConfig().getConnectTimeout()); connection.setReadTimeout(request.getConfig().getSocketTimeout()); connection.connect(); return parseResponse(connection, logger); } @VisibleForTesting public static String parseResponse(HttpURLConnection connection, Logger logger) throws IOException { if (connection.getResponseCode() != HttpStatus.SC_OK) { logger.warn( "Failed to get response from Doris {}, http code is {}", connection.getURL(), connection.getResponseCode()); throw new IOException("Failed to get response from Doris"); } StringBuffer result = new StringBuffer(); try (Scanner scanner = new Scanner(connection.getInputStream(), "utf-8")) { while (scanner.hasNext()) { result.append(scanner.next()); } return result.toString(); } } /** * parse table identifier to array. 
* * @param tableIdentifier table identifier string * @param logger {@link Logger} * @return first element is db name, second element is table name * @throws IllegalArgumentException table identifier is illegal */ @VisibleForTesting public static String[] parseIdentifier(String tableIdentifier, Logger logger) throws IllegalArgumentException { logger.trace("Parse identifier '{}'.", tableIdentifier); if (StringUtils.isEmpty(tableIdentifier)) { logger.error(ILLEGAL_ARGUMENT_MESSAGE, "table.identifier", tableIdentifier); throw new IllegalArgumentException("table.identifier", tableIdentifier); } String[] identifier = tableIdentifier.split("\\."); // db.table or catalog.db.table if (identifier.length != 2 && identifier.length != 3) { logger.error(ILLEGAL_ARGUMENT_MESSAGE, "table.identifier", tableIdentifier); throw new IllegalArgumentException("table.identifier", tableIdentifier); } return identifier; } /** * choice a Doris FE node to request. * * @param feNodes Doris FE node list, separate be comma * @param logger slf4j logger * @return the chosen one Doris FE node * @throws IllegalArgumentException fe nodes is illegal */ @VisibleForTesting public static String randomEndpoint(String feNodes, Logger logger) throws IllegalArgumentException { logger.trace("Parse fenodes '{}'.", feNodes); if (StringUtils.isEmpty(feNodes)) { logger.error(ILLEGAL_ARGUMENT_MESSAGE, "fenodes", feNodes); throw new IllegalArgumentException("fenodes", feNodes); } List<String> nodes = Arrays.asList(feNodes.split(",")); Collections.shuffle(nodes); for (String feNode : nodes) { String host = feNode.trim(); if (BackendUtil.tryHttpConnection(host)) { return host; } } throw new DorisRuntimeException( "No Doris FE is available, please check configuration or cluster status."); } /** * choice a Doris FE node to request. 
     *
     * @param feNodes Doris FE node list, separate be comma
     * @param logger slf4j logger
     * @return the shuffled list of all configured (trimmed) Doris FE nodes
     * @throws DorisRuntimeException when fenodes is empty
     */
    @VisibleForTesting
    static List<String> allEndpoints(String feNodes, Logger logger) {
        logger.trace("Parse fenodes '{}'.", feNodes);
        if (StringUtils.isEmpty(feNodes)) {
            logger.error(ILLEGAL_ARGUMENT_MESSAGE, "fenodes", feNodes);
            throw new DorisRuntimeException("fenodes is empty");
        }
        // Trim each entry; shuffling spreads subsequent requests across FE nodes.
        List<String> nodes =
                Arrays.stream(feNodes.split(",")).map(String::trim).collect(Collectors.toList());
        Collections.shuffle(nodes);
        return nodes;
    }

    /**
     * get Doris BE nodes to request.
     *
     * @param options configuration of request
     * @param readOptions read options used for the FE http call
     * @param logger slf4j logger
     * @return the list of alive Doris BE nodes
     * @throws DorisRuntimeException when no Doris FE can be reached
     */
    @VisibleForTesting
    public static List<BackendRowV2> getBackendsV2(
            DorisOptions options, DorisReadOptions readOptions, Logger logger) {
        String feNodes = options.getFenodes();
        List<String> feNodeList = allEndpoints(feNodes, logger);
        // With auto-redirect on, the FE endpoints themselves are used in place of a BE
        // list — see convert(...) below.
        if (options.isAutoRedirect() && !feNodeList.isEmpty()) {
            return convert(feNodeList);
        }
        // Fail over across FE nodes: the first FE that answers supplies the BE list.
        for (String feNode : feNodeList) {
            try {
                String beUrl = "http://" + feNode + BACKENDS_V2;
                HttpGet httpGet = new HttpGet(beUrl);
                String response = send(options, readOptions, httpGet, logger);
                logger.info("Backend Info:{}", response);
                List<BackendRowV2> backends = parseBackendV2(response, logger);
                return backends;
            } catch (ConnectedFailedException e) {
                logger.info(
                        "Doris FE node {} is unavailable: {}, Request the next Doris FE node",
                        feNode,
                        e.getMessage());
            }
        }
        String errMsg = "No Doris FE is available, please check configuration";
        logger.error(errMsg);
        throw new DorisRuntimeException(errMsg);
    }

    /**
     * When the user turns on redirection, there is no need to explicitly obtain the be list, just
     * treat the fe list as the be list.
* * @param feNodeList * @return */ private static List<BackendRowV2> convert(List<String> feNodeList) { List<BackendRowV2> nodeList = new ArrayList<>(); for (String node : feNodeList) { String[] split = node.split(":"); nodeList.add(BackendRowV2.of(split[0], Integer.valueOf(split[1]), true)); } return nodeList; } static List<BackendRowV2> parseBackendV2(String response, Logger logger) { ObjectMapper mapper = new ObjectMapper(); BackendV2 backend; try { backend = mapper.readValue(response, BackendV2.class); } catch (IOException e) { String errMsg = "Parse Doris BE's response to json failed. res: " + response; logger.error(errMsg, e); throw new DorisRuntimeException(errMsg, e); } if (backend == null) { logger.error(SHOULD_NOT_HAPPEN_MESSAGE); throw new ShouldNeverHappenException(); } List<BackendRowV2> backendRows = backend.getBackends(); logger.debug("Parsing schema result is '{}'.", backendRows); return backendRows; } /** * discover Doris table schema from Doris FE. * * @param options configuration of request * @param logger slf4j logger * @return Doris table schema * @throws DorisException throw when discover failed */ public static Schema getSchema( DorisOptions options, DorisReadOptions readOptions, Logger logger) throws DorisException { logger.trace("Finding schema."); String[] tableIdentifier = parseIdentifier(options.getTableIdentifier(), logger); String tableSchemaUri; if (tableIdentifier.length == 2) { tableSchemaUri = String.format( TABLE_SCHEMA_API, randomEndpoint(options.getFenodes(), logger), tableIdentifier[0], tableIdentifier[1]); } else if (tableIdentifier.length == 3) { tableSchemaUri = String.format( CATALOG_TABLE_SCHEMA_API, randomEndpoint(options.getFenodes(), logger), tableIdentifier[0], tableIdentifier[1], tableIdentifier[2]); } else { throw new IllegalArgumentException( "table identifier is illegal, should be db.table or catalog.db.table"); } HttpGet httpGet = new HttpGet(tableSchemaUri); String response = send(options, readOptions, httpGet, 
logger); logger.debug("Find schema response is '{}'.", response); return parseSchema(response, logger); } @VisibleForTesting public static Schema getSchema( DorisOptions dorisOptions, String db, String table, Logger logger) { logger.trace("start get " + db + "." + table + " schema from doris."); Object responseData = null; try { String tableSchemaUri = String.format( TABLE_SCHEMA_API, randomEndpoint(dorisOptions.getFenodes(), logger), db, table); HttpGetWithEntity httpGet = new HttpGetWithEntity(tableSchemaUri); httpGet.setHeader(HttpHeaders.AUTHORIZATION, authHeader(dorisOptions)); JsonNode response = handleResponse(httpGet, logger); responseData = response.path("data"); String schemaStr = objectMapper.writeValueAsString(responseData); return objectMapper.readValue(schemaStr, Schema.class); } catch (JsonProcessingException | IllegalArgumentException e) { throw new DorisSchemaChangeException( "can not parse response schema " + responseData, e); } } @VisibleForTesting public static JsonNode handleResponse(HttpUriRequest request, Logger logger) { try (CloseableHttpClient httpclient = HttpClients.createDefault()) { CloseableHttpResponse response = httpclient.execute(request); final int statusCode = response.getStatusLine().getStatusCode(); final String reasonPhrase = response.getStatusLine().getReasonPhrase(); if (statusCode == 200 && response.getEntity() != null) { String responseEntity = EntityUtils.toString(response.getEntity()); return objectMapper.readTree(responseEntity); } else { throw new DorisRuntimeException( "Failed to parse response, status: " + statusCode + ", reason: " + reasonPhrase); } } catch (Exception e) { logger.trace("request error,", e); throw new DorisRuntimeException("request error with " + e.getMessage()); } } /** Try to get the ArrowFlightSqlPort port */ public static Integer tryGetArrowFlightSqlPort( DorisOptions options, DorisReadOptions readOptions, Logger logger) { if (readOptions.getFlightSqlPort() != null && 
                readOptions.getFlightSqlPort() > 0) {
            // Explicitly configured port wins; no FE round trip needed.
            return readOptions.getFlightSqlPort();
        }
        try {
            // Ask a random FE for its frontends table and pick the ArrowFlightSqlPort column.
            Map<String, String> param = new HashMap<>();
            param.put("stmt", "show frontends");
            String requestUrl =
                    String.format(STATEMENT_EXEC_API, randomEndpoint(options.getFenodes(), logger));
            HttpPost httpPost = new HttpPost(requestUrl);
            httpPost.setHeader(HttpHeaders.AUTHORIZATION, authHeader(options));
            httpPost.setHeader(
                    HttpHeaders.CONTENT_TYPE,
                    String.format("application/json;charset=%s", "UTF-8"));
            httpPost.setEntity(new StringEntity(objectMapper.writeValueAsString(param), "UTF-8"));
            JsonNode response = handleResponse(httpPost, logger);
            logger.info("Get ArrowFlightSqlPort response is '{}'.", response);
            return getArrowFlightSqlPort(response);
        } catch (Exception ex) {
            // Best-effort lookup: -1 signals "not available" to callers.
            logger.warn("Failed to get ArrowFlightSqlPort, cause " + ex.getMessage());
            return -1;
        }
    }

    /**
     * Extracts the ArrowFlightSqlPort value from a "show frontends" result tree: locate the
     * column index in data.meta, then read that column from the first data row.
     *
     * @throws java.lang.IllegalArgumentException when the column or row is missing
     */
    @VisibleForTesting
    public static int getArrowFlightSqlPort(JsonNode rootNode) throws JsonProcessingException {
        JsonNode metaNode = rootNode.path("data").path("meta");
        JsonNode dataNode = rootNode.path("data").path("data");
        int columnIndex = -1;
        for (int i = 0; i < metaNode.size(); i++) {
            if ("ArrowFlightSqlPort".equals(metaNode.get(i).path("name").asText())) {
                columnIndex = i;
                break;
            }
        }
        if (columnIndex != -1 && dataNode.size() > 0) {
            int arrowFlightSqlPort = dataNode.get(0).get(columnIndex).asInt();
            return arrowFlightSqlPort;
        } else {
            // Fully qualified on purpose: the project declares its own IllegalArgumentException.
            throw new java.lang.IllegalArgumentException("ArrowFlightSqlPort not found");
        }
    }

    /** Builds the HTTP Basic auth header from the configured username/password. */
    private static String authHeader(DorisOptions dorisOptions) {
        // Base64 output is pure ASCII, so the charset-less String constructor is safe here.
        return "Basic "
                + new String(
                        org.apache.commons.codec.binary.Base64.encodeBase64(
                                (dorisOptions.getUsername() + ":" + dorisOptions.getPassword())
                                        .getBytes(StandardCharsets.UTF_8)));
    }

    /**
     * Checks whether the configured table uses the UNIQUE_KEYS model.
     *
     * @return true when the table is UNIQUE_KEYS, or when no single table is configured
     *     (multi-table scenario — verification is skipped)
     */
    public static boolean isUniqueKeyType(
            DorisOptions options, DorisReadOptions readOptions, Logger logger)
            throws DorisRuntimeException {
        // disable 2pc in multi-table scenario
        if (StringUtils.isBlank(options.getTableIdentifier())) {
            logger.info("table model verification is skipped in multi-table scenarios.");
            return true;
        }
        try {
            return UNIQUE_KEYS_TYPE.equals(getSchema(options, readOptions, logger).getKeysType());
        } catch (Exception e) {
            throw new DorisRuntimeException(e);
        }
    }

    /**
     * translate Doris FE response to inner {@link Schema} struct.
     *
     * @param response Doris FE response
     * @param logger {@link Logger}
     * @return inner {@link Schema} struct
     * @throws DorisException throw when translate failed
     */
    @VisibleForTesting
    public static Schema parseSchema(String response, Logger logger) throws DorisException {
        logger.trace("Parse response '{}' to schema.", response);
        ObjectMapper mapper = new ObjectMapper();
        Schema schema;
        // Three distinct failure modes are reported with distinct messages on purpose:
        // not-json, json-but-wrong-shape, and generic IO failure.
        try {
            schema = mapper.readValue(response, Schema.class);
        } catch (JsonParseException e) {
            String errMsg = "Doris FE's response is not a json. res: " + response;
            logger.error(errMsg, e);
            throw new DorisException(errMsg, e);
        } catch (JsonMappingException e) {
            String errMsg = "Doris FE's response cannot map to schema. res: " + response;
            logger.error(errMsg, e);
            throw new DorisException(errMsg, e);
        } catch (IOException e) {
            String errMsg = "Parse Doris FE's response to json failed. res: " + response;
            logger.error(errMsg, e);
            throw new DorisException(errMsg, e);
        }
        if (schema == null) {
            logger.error(SHOULD_NOT_HAPPEN_MESSAGE);
            throw new ShouldNeverHappenException();
        }
        if (schema.getStatus() != REST_RESPONSE_STATUS_OK) {
            String errMsg = "Doris FE's response is not OK, status is " + schema.getStatus();
            logger.error(errMsg);
            throw new DorisException(errMsg);
        }
        logger.debug("Parsing schema result is '{}'.", schema);
        return schema;
    }

    /**
     * find Doris partitions from Doris FE.
* * @param options configuration of request * @param logger {@link Logger} * @return a list of Doris partitions * @throws DorisException throw when find partition failed */ public static List<PartitionDefinition> findPartitions( DorisOptions options, DorisReadOptions readOptions, Logger logger) throws DorisException { String[] tableIdentifiers = parseIdentifier(options.getTableIdentifier(), logger); Preconditions.checkArgument( tableIdentifiers.length == 2, "table identifier is illegal, should be db.table"); String readFields = StringUtils.isBlank(readOptions.getReadFields()) ? "*" : readOptions.getReadFields(); String sql = "select " + readFields + " from `" + tableIdentifiers[0] + "`.`" + tableIdentifiers[1] + "`"; if (!StringUtils.isEmpty(readOptions.getFilterQuery())) { sql += " where " + readOptions.getFilterQuery(); } logger.info("Query SQL Sending to Doris FE is: '{}'.", sql); String[] tableIdentifier = parseIdentifier(options.getTableIdentifier(), logger); String queryPlanUri = String.format( QUERY_PLAN_API, randomEndpoint(options.getFenodes(), logger), tableIdentifier[0], tableIdentifier[1]); HttpPost httpPost = new HttpPost(queryPlanUri); String entity = "{\"sql\": \"" + sql + "\"}"; logger.debug("Post body Sending to Doris FE is: '{}'.", entity); StringEntity stringEntity = new StringEntity(entity, StandardCharsets.UTF_8); stringEntity.setContentEncoding("UTF-8"); stringEntity.setContentType("application/json"); httpPost.setEntity(stringEntity); String resStr = send(options, readOptions, httpPost, logger); logger.debug("Find partition response is '{}'.", resStr); QueryPlan queryPlan = getQueryPlan(resStr, logger); Map<String, List<Long>> be2Tablets = selectBeForTablet(queryPlan, logger); return tabletsMapToPartition( options, readOptions, be2Tablets, queryPlan.getOpaquedQueryPlan(), tableIdentifiers[0], tableIdentifiers[1], logger); } /** * translate Doris FE response string to inner {@link QueryPlan} struct. 
     *
     * @param response Doris FE response string
     * @param logger {@link Logger}
     * @return inner {@link QueryPlan} struct
     * @throws DorisException throw when translate failed.
     */
    @VisibleForTesting
    static QueryPlan getQueryPlan(String response, Logger logger) throws DorisException {
        ObjectMapper mapper = new ObjectMapper();
        QueryPlan queryPlan;
        try {
            queryPlan = mapper.readValue(response, QueryPlan.class);
        } catch (IOException e) {
            String errMsg = "Parse Doris FE's response to json failed. res: " + response;
            logger.error(errMsg, e);
            throw new DorisException(errMsg, e);
        }
        if (queryPlan == null) {
            logger.error(SHOULD_NOT_HAPPEN_MESSAGE + " res: " + response);
            throw new ShouldNeverHappenException();
        }
        if (queryPlan.getStatus() != REST_RESPONSE_STATUS_OK) {
            String errMsg = "Doris FE's response is not OK, res: " + response;
            logger.error(errMsg);
            throw new DorisException(errMsg);
        }
        logger.debug("Parsing partition result is '{}'.", queryPlan);
        return queryPlan;
    }

    /**
     * select which Doris BE to get tablet data.
     *
     * <p>Greedy balancing: for each tablet, prefer a BE that has not been assigned any tablet
     * yet; otherwise pick the replica's BE with the fewest tablets assigned so far. Note that
     * be2Tablets is mutated while iterating the plan, so assignment order matters.
     *
     * @param queryPlan {@link QueryPlan} translated from Doris FE response
     * @param logger {@link Logger}
     * @return BE to tablets {@link Map}
     * @throws DorisException throw when select failed.
     */
    @VisibleForTesting
    static Map<String, List<Long>> selectBeForTablet(QueryPlan queryPlan, Logger logger)
            throws DorisException {
        Map<String, List<Long>> be2Tablets = new HashMap<>();
        for (Entry<String, Tablet> part : queryPlan.getPartitions().entrySet()) {
            logger.debug("Parse tablet info: '{}'.", part);
            long tabletId;
            try {
                tabletId = Long.parseLong(part.getKey());
            } catch (NumberFormatException e) {
                String errMsg = "Parse tablet id '" + part.getKey() + "' to long failed.";
                logger.error(errMsg, e);
                throw new DorisException(errMsg, e);
            }
            String target = null;
            int tabletCount = Integer.MAX_VALUE;
            for (String candidate : part.getValue().getRoutings()) {
                logger.trace("Evaluate Doris BE '{}' to tablet '{}'.", candidate, tabletId);
                if (!be2Tablets.containsKey(candidate)) {
                    // First time this BE appears: take it immediately.
                    logger.debug(
                            "Choice a new Doris BE '{}' for tablet '{}'.", candidate, tabletId);
                    List<Long> tablets = new ArrayList<>();
                    be2Tablets.put(candidate, tablets);
                    target = candidate;
                    break;
                } else {
                    // Otherwise track the least-loaded BE seen so far.
                    if (be2Tablets.get(candidate).size() < tabletCount) {
                        target = candidate;
                        tabletCount = be2Tablets.get(candidate).size();
                        logger.debug(
                                "Current candidate Doris BE to tablet '{}' is '{}' with tablet count {}.",
                                tabletId,
                                target,
                                tabletCount);
                    }
                }
            }
            if (target == null) {
                String errMsg = "Cannot choice Doris BE for tablet " + tabletId;
                logger.error(errMsg);
                throw new DorisException(errMsg);
            }
            logger.debug("Choice Doris BE '{}' for tablet '{}'.", target, tabletId);
            be2Tablets.get(target).add(tabletId);
        }
        return be2Tablets;
    }

    /**
     * tablet count limit for one Doris RDD partition.
     *
     * @param readOptions configuration of request
     * @param logger {@link Logger}
     * @return tablet count limit
     */
    @VisibleForTesting
    static int tabletCountLimitForOnePartition(DorisReadOptions readOptions, Logger logger) {
        int tabletsSize = DORIS_TABLET_SIZE_DEFAULT;
        if (readOptions.getRequestTabletSize() != null) {
            tabletsSize = readOptions.getRequestTabletSize();
        }
        // Clamp to the minimum to avoid generating an excessive number of partitions.
        if (tabletsSize < DORIS_TABLET_SIZE_MIN) {
            logger.warn(
                    "{} is less than {}, set to default value {}.",
                    DORIS_TABLET_SIZE,
                    DORIS_TABLET_SIZE_MIN,
                    DORIS_TABLET_SIZE_MIN);
            tabletsSize = DORIS_TABLET_SIZE_MIN;
        }
        logger.debug("Tablet size is set to {}.", tabletsSize);
        return tabletsSize;
    }

    /**
     * translate BE tablets map to Doris RDD partition.
     *
     * @param options configuration of request
     * @param be2Tablets BE to tablets {@link Map}
     * @param opaquedQueryPlan Doris BE execute plan getting from Doris FE
     * @param database database name of Doris table
     * @param table table name of Doris table
     * @param logger {@link Logger}
     * @return Doris RDD partition {@link List}
     * @throws IllegalArgumentException throw when translate failed
     */
    @VisibleForTesting
    static List<PartitionDefinition> tabletsMapToPartition(
            DorisOptions options,
            DorisReadOptions readOptions,
            Map<String, List<Long>> be2Tablets,
            String opaquedQueryPlan,
            String database,
            String table,
            Logger logger)
            throws IllegalArgumentException {
        int tabletsSize = tabletCountLimitForOnePartition(readOptions, logger);
        List<PartitionDefinition> partitions = new ArrayList<>();
        for (Entry<String, List<Long>> beInfo : be2Tablets.entrySet()) {
            logger.debug("Generate partition with beInfo: '{}'.", beInfo);
            // Deduplicate the tablet ids in place (round-trip through a HashSet).
            HashSet<Long> tabletSet = new HashSet<>(beInfo.getValue());
            beInfo.getValue().clear();
            beInfo.getValue().addAll(tabletSet);
            int first = 0;
            // Slice this BE's tablets into chunks of at most tabletsSize per partition.
            while (first < beInfo.getValue().size()) {
                Set<Long> partitionTablets =
                        new HashSet<>(
                                beInfo.getValue()
                                        .subList(
                                                first,
                                                Math.min(
                                                        beInfo.getValue().size(),
                                                        first + tabletsSize)));
                first = first + tabletsSize;
                PartitionDefinition partitionDefinition =
                        new PartitionDefinition(
                                database,
                                table,
                                beInfo.getKey(),
                                partitionTablets,
                                opaquedQueryPlan);
                logger.debug("Generate one PartitionDefinition '{}'.", partitionDefinition);
                partitions.add(partitionDefinition);
            }
        }
        return partitions;
    }
}
apache/maven-archetype
35,356
archetype-common/src/main/java/org/apache/maven/archetype/generator/DefaultFilesetArchetypeGenerator.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.maven.archetype.generator; import javax.inject.Inject; import javax.inject.Named; import javax.inject.Singleton; import javax.xml.parsers.ParserConfigurationException; import javax.xml.transform.TransformerException; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.nio.file.Files; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import groovy.lang.Binding; import groovy.lang.GroovyShell; import org.apache.maven.archetype.ArchetypeGenerationRequest; import org.apache.maven.archetype.common.ArchetypeArtifactManager; import org.apache.maven.archetype.common.ArchetypeFilesResolver; import org.apache.maven.archetype.common.Constants; import org.apache.maven.archetype.common.PomManager; import org.apache.maven.archetype.exception.ArchetypeGenerationFailure; import org.apache.maven.archetype.exception.ArchetypeNotConfigured; 
import org.apache.maven.archetype.exception.InvalidPackaging; import org.apache.maven.archetype.exception.OutputFileExists; import org.apache.maven.archetype.exception.PomFileExists; import org.apache.maven.archetype.exception.ProjectDirectoryExists; import org.apache.maven.archetype.exception.UnknownArchetype; import org.apache.maven.archetype.metadata.AbstractArchetypeDescriptor; import org.apache.maven.archetype.metadata.ArchetypeDescriptor; import org.apache.maven.archetype.metadata.FileSet; import org.apache.maven.archetype.metadata.ModuleDescriptor; import org.apache.maven.archetype.metadata.RequiredProperty; import org.apache.velocity.VelocityContext; import org.apache.velocity.context.Context; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.StringUtils; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; import org.codehaus.plexus.velocity.VelocityComponent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.SAXException; @Named @Singleton public class DefaultFilesetArchetypeGenerator implements FilesetArchetypeGenerator { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultFilesetArchetypeGenerator.class); private ArchetypeArtifactManager archetypeArtifactManager; private ArchetypeFilesResolver archetypeFilesResolver; private PomManager pomManager; private VelocityComponent velocity; @Inject public DefaultFilesetArchetypeGenerator( ArchetypeArtifactManager archetypeArtifactManager, ArchetypeFilesResolver archetypeFilesResolver, PomManager pomManager, VelocityComponent velocity) { this.archetypeArtifactManager = archetypeArtifactManager; this.archetypeFilesResolver = archetypeFilesResolver; this.pomManager = pomManager; this.velocity = velocity; } /** * Pattern used to detect tokens in a string. Tokens are any text surrounded * by the delimiter <code>__</code>. 
*/ private static final Pattern TOKEN_PATTERN = Pattern.compile("__((?:[^_]+_)*[^_]+)__"); @Override @SuppressWarnings("checkstyle:MethodLength") public void generateArchetype(ArchetypeGenerationRequest request, File archetypeFile) throws UnknownArchetype, ArchetypeNotConfigured, ProjectDirectoryExists, PomFileExists, OutputFileExists, ArchetypeGenerationFailure, InvalidPackaging { ClassLoader old = Thread.currentThread().getContextClassLoader(); try { ArchetypeDescriptor archetypeDescriptor = archetypeArtifactManager.getFileSetArchetypeDescriptor(archetypeFile); if (!isArchetypeConfigured(archetypeDescriptor, request)) { if (request.isInteractiveMode()) { throw new ArchetypeNotConfigured("No archetype was chosen.", null); } StringBuilder exceptionMessage = new StringBuilder( "Archetype " + request.getArchetypeGroupId() + ":" + request.getArchetypeArtifactId() + ":" + request.getArchetypeVersion() + " is not configured"); List<String> missingProperties = new ArrayList<>(0); for (RequiredProperty requiredProperty : archetypeDescriptor.getRequiredProperties()) { if (StringUtils.isEmpty(request.getProperties().getProperty(requiredProperty.getKey()))) { exceptionMessage.append("\n\tProperty " + requiredProperty.getKey() + " is missing."); missingProperties.add(requiredProperty.getKey()); } } throw new ArchetypeNotConfigured(exceptionMessage.toString(), missingProperties); } Context context = prepareVelocityContext(request); String packageName = request.getPackage(); String artifactId = request.getArtifactId(); File outputDirectoryFile = new File(request.getOutputDirectory(), artifactId); File basedirPom = new File(request.getOutputDirectory(), Constants.ARCHETYPE_POM); File pom = new File(outputDirectoryFile, Constants.ARCHETYPE_POM); List<String> archetypeResources = archetypeArtifactManager.getFilesetArchetypeResources(archetypeFile); ZipFile archetypeZipFile = archetypeArtifactManager.getArchetypeZipFile(archetypeFile); ClassLoader archetypeJarLoader = 
archetypeArtifactManager.getArchetypeJarLoader(archetypeFile); Thread.currentThread().setContextClassLoader(archetypeJarLoader); if (archetypeDescriptor.isPartial()) { LOGGER.debug("Processing partial archetype " + archetypeDescriptor.getName()); if (outputDirectoryFile.exists()) { if (!pom.exists()) { throw new PomFileExists("This is a partial archetype and the pom.xml file doesn't exist."); } processPomWithMerge(context, pom, ""); processArchetypeTemplatesWithWarning( archetypeDescriptor, archetypeResources, archetypeZipFile, "", context, packageName, outputDirectoryFile); } else { if (basedirPom.exists()) { processPomWithMerge(context, basedirPom, ""); processArchetypeTemplatesWithWarning( archetypeDescriptor, archetypeResources, archetypeZipFile, "", context, packageName, new File(request.getOutputDirectory())); } else { processPom(context, pom, ""); processArchetypeTemplates( archetypeDescriptor, archetypeResources, archetypeZipFile, "", context, packageName, outputDirectoryFile); } } if (!archetypeDescriptor.getModules().isEmpty()) { LOGGER.info("Modules ignored in partial mode"); } } else { LOGGER.debug("Processing complete archetype " + archetypeDescriptor.getName()); if (outputDirectoryFile.exists() && pom.exists()) { throw new ProjectDirectoryExists( "A Maven project already exists in the directory " + outputDirectoryFile.getPath()); } if (outputDirectoryFile.exists()) { LOGGER.warn("The directory " + outputDirectoryFile.getPath() + " already exists."); } context.put("rootArtifactId", artifactId); processFilesetModule( artifactId, artifactId, archetypeResources, pom, archetypeZipFile, "", basedirPom, outputDirectoryFile, packageName, archetypeDescriptor, context); } String postGenerationScript = archetypeArtifactManager.getPostGenerationScript(archetypeFile); if (postGenerationScript != null) { LOGGER.info("Executing " + Constants.ARCHETYPE_POST_GENERATION_SCRIPT + " post-generation script"); Binding binding = new Binding(); final Properties 
archetypeGeneratorProperties = new Properties(); archetypeGeneratorProperties.putAll(System.getProperties()); if (request.getProperties() != null) { archetypeGeneratorProperties.putAll(request.getProperties()); } for (Map.Entry<Object, Object> entry : archetypeGeneratorProperties.entrySet()) { binding.setVariable(entry.getKey().toString(), entry.getValue()); } binding.setVariable("request", request); GroovyShell shell = new GroovyShell(binding); shell.evaluate(postGenerationScript); } // ---------------------------------------------------------------------- // Log message on OldArchetype creation // ---------------------------------------------------------------------- if (LOGGER.isInfoEnabled()) { LOGGER.info("Project created from Archetype in dir: " + outputDirectoryFile.getAbsolutePath()); } } catch (IOException | XmlPullParserException | ParserConfigurationException | TransformerException | SAXException e) { throw new ArchetypeGenerationFailure(e); } finally { Thread.currentThread().setContextClassLoader(old); } } public String getPackageAsDirectory(String packageName) { return StringUtils.replace(packageName, ".", "/"); } private boolean copyFile( final File outFile, final String template, final boolean failIfExists, final ZipFile archetypeZipFile) throws OutputFileExists, IOException { LOGGER.debug("Copying file " + template); if (failIfExists && outFile.exists()) { throw new OutputFileExists("Don't rewrite file " + outFile.getName()); } else if (outFile.exists()) { LOGGER.warn("CP Don't override file " + outFile); return false; } ZipEntry input = archetypeZipFile.getEntry(Constants.ARCHETYPE_RESOURCES + "/" + template); if (input.isDirectory()) { outFile.mkdirs(); } else { outFile.getParentFile().mkdirs(); if (!outFile.exists() && !outFile.createNewFile()) { LOGGER.warn("Could not create new file \"" + outFile.getPath() + "\" or the file already exists."); } try (InputStream inputStream = archetypeZipFile.getInputStream(input); OutputStream out = 
Files.newOutputStream(outFile.toPath())) { IOUtil.copy(inputStream, out); } } return true; } @SuppressWarnings("checkstyle:ParameterNumber") private int copyFiles( String directory, List<String> fileSetResources, boolean packaged, String packageName, File outputDirectoryFile, ZipFile archetypeZipFile, String moduleOffset, boolean failIfExists, Context context) throws OutputFileExists, IOException { int count = 0; for (String template : fileSetResources) { File outputFile = getOutputFile( template, directory, outputDirectoryFile, packaged, packageName, moduleOffset, context); if (copyFile(outputFile, template, failIfExists, archetypeZipFile)) { count++; } } return count; } private String getEncoding(String archetypeEncoding) { return (archetypeEncoding == null || archetypeEncoding.isEmpty()) ? "UTF-8" : archetypeEncoding; } private String getOffsetSeparator(String moduleOffset) { return (moduleOffset == null || moduleOffset.isEmpty()) ? "/" : ("/" + moduleOffset + "/"); } private File getOutputFile( String template, String directory, File outputDirectoryFile, boolean packaged, String packageName, String moduleOffset, Context context) { String templateName = StringUtils.replaceOnce(template, directory, ""); String outputFileName = directory + "/" + (packaged ? getPackageAsDirectory(packageName) : "") + "/" + templateName.substring(moduleOffset.length()); outputFileName = replaceFilenameTokens(outputFileName, context); return new File(outputDirectoryFile, outputFileName); } /** * Replaces all tokens (text matching {@link #TOKEN_PATTERN}) within * the given string, using properties contained within the context. If a * property does not exist in the context, the token is left unmodified * and a warning is logged. 
* * @param filePath the file name and path to be interpolated * @param context contains the available properties */ private String replaceFilenameTokens(final String filePath, final Context context) { StringBuffer interpolatedResult = new StringBuffer(); Matcher matcher = TOKEN_PATTERN.matcher(filePath); while (matcher.find()) { String propertyToken = matcher.group(1); String contextPropertyValue = (String) context.get(propertyToken); if (contextPropertyValue != null && !contextPropertyValue.trim().isEmpty()) { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Replacing property '" + propertyToken + "' in file path '" + filePath + "' with value '" + contextPropertyValue + "'."); } matcher.appendReplacement(interpolatedResult, contextPropertyValue); } else { // Need to skip the undefined property LOGGER.warn("Property '" + propertyToken + "' was not specified, so the token in '" + filePath + "' is not being replaced."); } } matcher.appendTail(interpolatedResult); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Final interpolated file path: '" + interpolatedResult + "'"); } return interpolatedResult.toString(); } private String getPackageInPathFormat(String aPackage) { return StringUtils.replace(aPackage, ".", "/"); } private boolean isArchetypeConfigured(ArchetypeDescriptor archetypeDescriptor, ArchetypeGenerationRequest request) { for (RequiredProperty requiredProperty : archetypeDescriptor.getRequiredProperties()) { if (StringUtils.isEmpty(request.getProperties().getProperty(requiredProperty.getKey()))) { return false; } } return true; } private void setParentArtifactId(Context context, String artifactId) { context.put(Constants.PARENT_ARTIFACT_ID, artifactId); } private Context prepareVelocityContext(ArchetypeGenerationRequest request) { Context context = new VelocityContext(); context.put(Constants.GROUP_ID, request.getGroupId()); context.put(Constants.ARTIFACT_ID, request.getArtifactId()); context.put(Constants.VERSION, request.getVersion()); 
context.put(Constants.PACKAGE, request.getPackage()); final String packageInPathFormat = getPackageInPathFormat(request.getPackage()); context.put(Constants.PACKAGE_IN_PATH_FORMAT, packageInPathFormat); if (LOGGER.isInfoEnabled()) { LOGGER.info("----------------------------------------------------------------------------"); LOGGER.info("Using following parameters for creating project from Archetype: " + request.getArchetypeArtifactId() + ":" + request.getArchetypeVersion()); LOGGER.info("----------------------------------------------------------------------------"); LOGGER.info("Parameter: " + Constants.GROUP_ID + ", Value: " + request.getGroupId()); LOGGER.info("Parameter: " + Constants.ARTIFACT_ID + ", Value: " + request.getArtifactId()); LOGGER.info("Parameter: " + Constants.VERSION + ", Value: " + request.getVersion()); LOGGER.info("Parameter: " + Constants.PACKAGE + ", Value: " + request.getPackage()); LOGGER.info("Parameter: " + Constants.PACKAGE_IN_PATH_FORMAT + ", Value: " + packageInPathFormat); } for (Iterator<?> iterator = request.getProperties().keySet().iterator(); iterator.hasNext(); ) { String key = (String) iterator.next(); String value = request.getProperties().getProperty(key); if (maybeVelocityExpression(value)) { value = evaluateExpression(context, key, value); } context.put(key, value); if (LOGGER.isInfoEnabled()) { LOGGER.info("Parameter: " + key + ", Value: " + value); } } return context; } private boolean maybeVelocityExpression(String value) { return value != null && value.contains("${"); } private String evaluateExpression(Context context, String key, String value) { try (StringWriter stringWriter = new StringWriter()) { velocity.getEngine().evaluate(context, stringWriter, key, value); return stringWriter.toString(); } catch (Exception ex) { return value; } } private void processArchetypeTemplates( AbstractArchetypeDescriptor archetypeDescriptor, List<String> archetypeResources, ZipFile archetypeZipFile, String moduleOffset, Context 
context, String packageName, File outputDirectoryFile) throws OutputFileExists, ArchetypeGenerationFailure, IOException { processTemplates( packageName, outputDirectoryFile, context, archetypeDescriptor, archetypeResources, archetypeZipFile, moduleOffset, false); } private void processArchetypeTemplatesWithWarning( ArchetypeDescriptor archetypeDescriptor, List<String> archetypeResources, ZipFile archetypeZipFile, String moduleOffset, Context context, String packageName, File outputDirectoryFile) throws OutputFileExists, ArchetypeGenerationFailure, IOException { processTemplates( packageName, outputDirectoryFile, context, archetypeDescriptor, archetypeResources, archetypeZipFile, moduleOffset, true); } @SuppressWarnings("checkstyle:ParameterNumber") private int processFileSet( String directory, List<String> fileSetResources, boolean packaged, String packageName, Context context, File outputDirectoryFile, String moduleOffset, String archetypeEncoding, boolean failIfExists) throws IOException, OutputFileExists, ArchetypeGenerationFailure { int count = 0; for (String template : fileSetResources) { File outputFile = getOutputFile( template, directory, outputDirectoryFile, packaged, packageName, moduleOffset, context); if (processTemplate( outputFile, context, Constants.ARCHETYPE_RESOURCES + "/" + template, archetypeEncoding, failIfExists)) { count++; } } return count; } @SuppressWarnings("checkstyle:ParameterNumber") private void processFilesetModule( final String rootArtifactId, final String artifactId, final List<String> archetypeResources, File pom, final ZipFile archetypeZipFile, String moduleOffset, File basedirPom, File outputDirectoryFile, final String packageName, final AbstractArchetypeDescriptor archetypeDescriptor, final Context context) throws XmlPullParserException, IOException, ParserConfigurationException, SAXException, TransformerException, OutputFileExists, ArchetypeGenerationFailure, InvalidPackaging { outputDirectoryFile.mkdirs(); 
LOGGER.debug("Processing module " + artifactId); LOGGER.debug("Processing module rootArtifactId " + rootArtifactId); LOGGER.debug("Processing module pom " + pom); LOGGER.debug("Processing module moduleOffset " + moduleOffset); LOGGER.debug("Processing module outputDirectoryFile " + outputDirectoryFile); processFilesetProject( archetypeDescriptor, StringUtils.replace(artifactId, "${rootArtifactId}", rootArtifactId), archetypeResources, pom, archetypeZipFile, moduleOffset, context, packageName, outputDirectoryFile, basedirPom); String parentArtifactId = (String) context.get(Constants.PARENT_ARTIFACT_ID); Iterator<ModuleDescriptor> subprojects = archetypeDescriptor.getModules().iterator(); if (subprojects.hasNext()) { LOGGER.debug(artifactId + " has modules (" + archetypeDescriptor.getModules() + ")"); setParentArtifactId(context, StringUtils.replace(artifactId, "${rootArtifactId}", rootArtifactId)); } while (subprojects.hasNext()) { ModuleDescriptor project = subprojects.next(); String modulePath = StringUtils.replace(project.getDir(), "__rootArtifactId__", rootArtifactId); modulePath = replaceFilenameTokens(modulePath, context); File moduleOutputDirectoryFile = new File(outputDirectoryFile, modulePath); context.put( Constants.ARTIFACT_ID, StringUtils.replace(project.getId(), "${rootArtifactId}", rootArtifactId)); String moduleArtifactId = StringUtils.replace(project.getDir(), "__rootArtifactId__", rootArtifactId); moduleArtifactId = replaceFilenameTokens(moduleArtifactId, context); processFilesetModule( rootArtifactId, moduleArtifactId, archetypeResources, new File(moduleOutputDirectoryFile, Constants.ARCHETYPE_POM), archetypeZipFile, ((moduleOffset == null || moduleOffset.isEmpty()) ? 
"" : (moduleOffset + "/")) + StringUtils.replace(project.getDir(), "${rootArtifactId}", rootArtifactId), pom, moduleOutputDirectoryFile, packageName, project, context); } restoreParentArtifactId(context, parentArtifactId); LOGGER.debug("Processed " + artifactId); } @SuppressWarnings("checkstyle:ParameterNumber") private void processFilesetProject( final AbstractArchetypeDescriptor archetypeDescriptor, final String moduleId, final List<String> archetypeResources, final File pom, final ZipFile archetypeZipFile, String moduleOffset, final Context context, final String packageName, final File outputDirectoryFile, final File basedirPom) throws XmlPullParserException, IOException, ParserConfigurationException, SAXException, TransformerException, OutputFileExists, ArchetypeGenerationFailure, InvalidPackaging { LOGGER.debug("Processing fileset project moduleId " + moduleId); LOGGER.debug("Processing fileset project pom " + pom); LOGGER.debug("Processing fileset project moduleOffset " + moduleOffset); LOGGER.debug("Processing fileset project outputDirectoryFile " + outputDirectoryFile); LOGGER.debug("Processing fileset project basedirPom " + basedirPom); if (basedirPom.exists()) { processPomWithParent(context, pom, moduleOffset, basedirPom, moduleId); } else { processPom(context, pom, moduleOffset); } processArchetypeTemplates( archetypeDescriptor, archetypeResources, archetypeZipFile, moduleOffset, context, packageName, outputDirectoryFile); } private void processPom(Context context, File pom, String moduleOffset) throws IOException, OutputFileExists, ArchetypeGenerationFailure { LOGGER.debug("Processing pom " + pom); processTemplate( pom, context, Constants.ARCHETYPE_RESOURCES + getOffsetSeparator(moduleOffset) + Constants.ARCHETYPE_POM, getEncoding(null), true); } private void processPomWithMerge(Context context, File pom, String moduleOffset) throws OutputFileExists, IOException, XmlPullParserException, ArchetypeGenerationFailure { LOGGER.debug("Processing pom " + pom + 
" with merge"); File temporaryPom = getTemporaryFile(pom); processTemplate( temporaryPom, context, Constants.ARCHETYPE_RESOURCES + getOffsetSeparator(moduleOffset) + Constants.ARCHETYPE_POM, getEncoding(null), true); pomManager.mergePoms(pom, temporaryPom); // getTemporaryFile sets deleteOnExit. Lets try to delete and then make sure deleteOnExit is // still set. Windows has issues deleting files with certain JDKs. try { FileUtils.forceDelete(temporaryPom); } catch (IOException e) { temporaryPom.deleteOnExit(); } } private void processPomWithParent(Context context, File pom, String moduleOffset, File basedirPom, String moduleId) throws XmlPullParserException, IOException, ParserConfigurationException, SAXException, TransformerException, OutputFileExists, ArchetypeGenerationFailure, InvalidPackaging { LOGGER.debug("Processing pom " + pom + " with parent " + basedirPom); processTemplate( pom, context, Constants.ARCHETYPE_RESOURCES + getOffsetSeparator(moduleOffset) + Constants.ARCHETYPE_POM, getEncoding(null), true); LOGGER.debug("Adding module " + moduleId); pomManager.addModule(basedirPom, moduleId); pomManager.addParent(pom, basedirPom); } @SuppressWarnings("deprecation") private boolean processTemplate( File outFile, Context context, String templateFileName, String encoding, boolean failIfExists) throws IOException, OutputFileExists, ArchetypeGenerationFailure { templateFileName = templateFileName.replace(File.separatorChar, '/'); String localTemplateFileName = templateFileName.replace('/', File.separatorChar); if (!templateFileName.equals(localTemplateFileName) && !velocity.getEngine().resourceExists(templateFileName) && velocity.getEngine().resourceExists(localTemplateFileName)) { templateFileName = localTemplateFileName; } LOGGER.debug("Processing template " + templateFileName); if (outFile.exists()) { if (failIfExists) { throw new OutputFileExists("Don't override file " + outFile.getAbsolutePath()); } LOGGER.warn("Don't override file " + outFile); return 
false; } if (templateFileName.endsWith("/")) { LOGGER.debug("Creating directory " + outFile); outFile.mkdirs(); return true; } if (!outFile.getParentFile().exists()) { outFile.getParentFile().mkdirs(); } if (!outFile.exists() && !outFile.createNewFile()) { LOGGER.warn("Could not create new file \"" + outFile.getPath() + "\" or the file already exists."); } LOGGER.debug("Merging into " + outFile); try (Writer writer = new OutputStreamWriter(Files.newOutputStream(outFile.toPath()), encoding)) { StringWriter stringWriter = new StringWriter(); velocity.getEngine().mergeTemplate(templateFileName, encoding, context, stringWriter); writer.write(StringUtils.unifyLineSeparators(stringWriter.toString())); } catch (Exception e) { throw new ArchetypeGenerationFailure("Error merging velocity templates: " + e.getMessage(), e); } return true; } @SuppressWarnings("checkstyle:ParameterNumber") private void processTemplates( String packageName, File outputDirectoryFile, Context context, AbstractArchetypeDescriptor archetypeDescriptor, List<String> archetypeResources, ZipFile archetypeZipFile, String moduleOffset, boolean failIfExists) throws OutputFileExists, ArchetypeGenerationFailure, IOException { Iterator<FileSet> iterator = archetypeDescriptor.getFileSets().iterator(); if (iterator.hasNext()) { LOGGER.debug("Processing filesets" + "\n " + archetypeResources); } int count = 0; while (iterator.hasNext()) { FileSet fileSet = iterator.next(); count++; final String includeCondition = fileSet.getIncludeCondition(); if (includeCondition != null && !includeCondition.isEmpty()) { final String evaluatedCondition = evaluateExpression(context, "includeCondition", includeCondition); if (!Boolean.parseBoolean(evaluatedCondition)) { LOGGER.debug(String.format( "Skipping fileset %s due to includeCondition: %s being: %s", fileSet, includeCondition, evaluatedCondition)); continue; } } List<String> fileSetResources = archetypeFilesResolver.filterFiles(moduleOffset, fileSet, archetypeResources); 
// This creates an empty directory, even if there is no file to process // Fix for ARCHETYPE-57 getOutputFile( moduleOffset, fileSet.getDirectory(), outputDirectoryFile, fileSet.isPackaged(), packageName, moduleOffset, context) .mkdirs(); if (fileSet.isFiltered()) { LOGGER.debug(" Processing fileset " + fileSet + " -> " + fileSetResources.size() + ":\n " + fileSetResources); int processed = processFileSet( fileSet.getDirectory(), fileSetResources, fileSet.isPackaged(), packageName, context, outputDirectoryFile, moduleOffset, getEncoding(fileSet.getEncoding()), failIfExists); LOGGER.debug(" Processed " + processed + " files."); } else { LOGGER.debug(" Copying fileset " + fileSet + " -> " + fileSetResources.size() + ":\n " + fileSetResources); int copied = copyFiles( fileSet.getDirectory(), fileSetResources, fileSet.isPackaged(), packageName, outputDirectoryFile, archetypeZipFile, moduleOffset, failIfExists, context); LOGGER.debug(" Copied " + copied + " files."); } } LOGGER.debug("Processed " + count + " filesets"); } private void restoreParentArtifactId(Context context, String parentArtifactId) { if (parentArtifactId == null || parentArtifactId.isEmpty()) { context.remove(Constants.PARENT_ARTIFACT_ID); } else { context.put(Constants.PARENT_ARTIFACT_ID, parentArtifactId); } } private File getTemporaryFile(File file) { File tmp = FileUtils.createTempFile(file.getName(), Constants.TMP, file.getParentFile()); tmp.deleteOnExit(); return tmp; } }
apache/flink
35,740
flink-tests/src/test/java/org/apache/flink/test/streaming/runtime/SortingBoundedInputITCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.streaming.runtime; import org.apache.flink.api.common.RuntimeExecutionMode; import org.apache.flink.api.common.eventtime.Watermark; import org.apache.flink.api.common.eventtime.WatermarkGenerator; import org.apache.flink.api.common.eventtime.WatermarkOutput; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.functions.OpenContext; import org.apache.flink.api.common.state.MapState; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.api.java.typeutils.TupleTypeInfo; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.ExecutionOptions; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.datastream.KeyedStream; 
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.KeyedProcessFunction; import org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction; import org.apache.flink.streaming.api.operators.AbstractStreamOperator; import org.apache.flink.streaming.api.operators.AbstractStreamOperatorV2; import org.apache.flink.streaming.api.operators.BoundedMultiInput; import org.apache.flink.streaming.api.operators.BoundedOneInput; import org.apache.flink.streaming.api.operators.ChainingStrategy; import org.apache.flink.streaming.api.operators.Input; import org.apache.flink.streaming.api.operators.MultipleInputStreamOperator; import org.apache.flink.streaming.api.operators.OneInputStreamOperator; import org.apache.flink.streaming.api.operators.StreamOperator; import org.apache.flink.streaming.api.operators.StreamOperatorFactory; import org.apache.flink.streaming.api.operators.StreamOperatorParameters; import org.apache.flink.streaming.api.operators.TwoInputStreamOperator; import org.apache.flink.streaming.api.transformations.KeyedMultipleInputTransformation; import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.runtime.watermarkstatus.WatermarkStatus; import org.apache.flink.test.util.AbstractTestBaseJUnit4; import org.apache.flink.util.CollectionUtil; import org.apache.flink.util.Collector; import org.apache.flink.util.OutputTag; import org.apache.flink.util.SplittableIterator; import org.junit.Assert; import org.junit.Test; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Random; import java.util.Set; import java.util.function.Consumer; import static org.hamcrest.CoreMatchers.equalTo; 
import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** An end to end test for sorted inputs for a keyed operator with bounded inputs. */ public class SortingBoundedInputITCase extends AbstractTestBaseJUnit4 { @Test public void testOneInputOperator() throws Exception { long numberOfRecords = 1_000_000; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); env.configure(config, this.getClass().getClassLoader()); DataStreamSource<Tuple2<Integer, byte[]>> elements = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)); SingleOutputStreamOperator<Long> counts = elements.keyBy(element -> element.f0) .transform( "Asserting operator", BasicTypeInfo.LONG_TYPE_INFO, new AssertingOperator()); long sum = CollectionUtil.iteratorToList(counts.executeAndCollect()).stream() .mapToLong(l -> l) .sum(); assertThat(sum, equalTo(numberOfRecords)); } @Test public void testTwoInputOperator() throws Exception { long numberOfRecords = 500_000; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); env.configure(config, this.getClass().getClassLoader()); DataStreamSource<Tuple2<Integer, byte[]>> elements1 = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)); DataStreamSource<Tuple2<Integer, byte[]>> elements2 = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, 
PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)); SingleOutputStreamOperator<Long> counts = elements1 .connect(elements2) .keyBy(element -> element.f0, element -> element.f0) .transform( "Asserting operator", BasicTypeInfo.LONG_TYPE_INFO, new AssertingTwoInputOperator()); long sum = CollectionUtil.iteratorToList(counts.executeAndCollect()).stream() .mapToLong(l -> l) .sum(); assertThat(sum, equalTo(numberOfRecords * 2)); } @Test public void testThreeInputOperator() throws Exception { long numberOfRecords = 500_000; StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); env.configure(config, this.getClass().getClassLoader()); KeyedStream<Tuple2<Integer, byte[]>, Object> elements1 = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)) .keyBy(el -> el.f0); KeyedStream<Tuple2<Integer, byte[]>, Object> elements2 = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)) .keyBy(el -> el.f0); KeyedStream<Tuple2<Integer, byte[]>, Object> elements3 = env.fromParallelCollection( new InputGenerator(numberOfRecords), new TupleTypeInfo<>( BasicTypeInfo.INT_TYPE_INFO, PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO)) .keyBy(el -> el.f0); KeyedMultipleInputTransformation<Long> assertingTransformation = new KeyedMultipleInputTransformation<>( "Asserting operator", new AssertingThreeInputOperatorFactory(), BasicTypeInfo.LONG_TYPE_INFO, -1, BasicTypeInfo.INT_TYPE_INFO); assertingTransformation.addInput(elements1.getTransformation(), elements1.getKeySelector()); assertingTransformation.addInput(elements2.getTransformation(), elements2.getKeySelector()); 
assertingTransformation.addInput(elements3.getTransformation(), elements3.getKeySelector()); env.addOperator(assertingTransformation); DataStream<Long> counts = new DataStream<>(env, assertingTransformation); long sum = CollectionUtil.iteratorToList(counts.executeAndCollect()).stream() .mapToLong(l -> l) .sum(); assertThat(sum, equalTo(numberOfRecords * 3)); } @Test public void testBatchExecutionWithTimersOneInput() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); // set parallelism to 1 to have consistent order of results Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); env.configure(config, this.getClass().getClassLoader()); WatermarkStrategy<Tuple2<Integer, Integer>> watermarkStrategy = WatermarkStrategy.forGenerator(ctx -> GENERATE_WATERMARK_AFTER_4_14_TIMESTAMP) .withTimestampAssigner((r, previousTimestamp) -> r.f1); SingleOutputStreamOperator<Tuple2<Integer, Integer>> elements = env.fromData( Tuple2.of(1, 3), Tuple2.of(1, 1), Tuple2.of(2, 1), Tuple2.of(1, 4), // emit watermark = 5 Tuple2.of(2, 3), // late element Tuple2.of(1, 2), // late element Tuple2.of(1, 13), Tuple2.of(1, 11), Tuple2.of(2, 14), // emit watermark = 15 Tuple2.of(1, 11) // late element ) .assignTimestampsAndWatermarks(watermarkStrategy); OutputTag<Integer> lateElements = new OutputTag<>("late_elements", BasicTypeInfo.INT_TYPE_INFO); SingleOutputStreamOperator<Tuple3<Long, Integer, Integer>> sums = elements.map(element -> element.f0) .keyBy(element -> element) .process( new KeyedProcessFunction< Integer, Integer, Tuple3<Long, Integer, Integer>>() { private MapState<Long, Integer> countState; private ValueState<Long> previousTimestampState; @Override public void open(OpenContext openContext) { countState = getRuntimeContext() .getMapState( new MapStateDescriptor<>( "sum", BasicTypeInfo .LONG_TYPE_INFO, BasicTypeInfo .INT_TYPE_INFO)); 
previousTimestampState = getRuntimeContext() .getState( new ValueStateDescriptor<>( "previousTimestamp", BasicTypeInfo .LONG_TYPE_INFO)); } @Override public void processElement( Integer value, Context ctx, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception { Long elementTimestamp = ctx.timestamp(); long nextTen = ((elementTimestamp + 10) / 10) * 10; ctx.timerService().registerEventTimeTimer(nextTen); if (elementTimestamp < ctx.timerService().currentWatermark()) { ctx.output(lateElements, value); } else { Long previousTimestamp = Optional.ofNullable( previousTimestampState.value()) .orElse(0L); assertThat( elementTimestamp, greaterThanOrEqualTo(previousTimestamp)); previousTimestampState.update(elementTimestamp); Integer currentCount = Optional.ofNullable(countState.get(nextTen)) .orElse(0); countState.put(nextTen, currentCount + 1); } } @Override public void onTimer( long timestamp, OnTimerContext ctx, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception { out.collect( Tuple3.of( timestamp, ctx.getCurrentKey(), countState.get(timestamp))); countState.remove(timestamp); // this would go in infinite loop if we did not quiesce the // timer service. 
ctx.timerService().registerEventTimeTimer(timestamp + 1); } }); DataStream<Integer> lateStream = sums.getSideOutput(lateElements); List<Integer> lateRecordsCollected = CollectionUtil.iteratorToList(lateStream.executeAndCollect()); List<Tuple3<Long, Integer, Integer>> sumsCollected = CollectionUtil.iteratorToList(sums.executeAndCollect()); assertTrue(lateRecordsCollected.isEmpty()); assertThat( sumsCollected, equalTo( Arrays.asList( Tuple3.of(10L, 1, 4), Tuple3.of(20L, 1, 3), Tuple3.of(10L, 2, 2), Tuple3.of(20L, 2, 1)))); } @Test public void testBatchExecutionWithTimersTwoInput() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); // set parallelism to 1 to have consistent order of results Configuration config = new Configuration(); config.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH); env.configure(config, this.getClass().getClassLoader()); WatermarkStrategy<Tuple2<Integer, Integer>> watermarkStrategy = WatermarkStrategy.forGenerator(ctx -> GENERATE_WATERMARK_AFTER_4_14_TIMESTAMP) .withTimestampAssigner((r, previousTimestamp) -> r.f1); SingleOutputStreamOperator<Integer> elements1 = env.fromData( Tuple2.of(1, 3), Tuple2.of(1, 1), Tuple2.of(2, 1), Tuple2.of(1, 4), // emit watermark = 5 Tuple2.of(2, 3), // late element Tuple2.of(1, 2), // late element Tuple2.of(1, 13), Tuple2.of(1, 11), Tuple2.of(2, 14), // emit watermark = 15 Tuple2.of(1, 11) // late element ) .assignTimestampsAndWatermarks(watermarkStrategy) .map(element -> element.f0); SingleOutputStreamOperator<Integer> elements2 = env.fromData( Tuple2.of(1, 3), Tuple2.of(1, 1), Tuple2.of(2, 1), Tuple2.of(1, 4), // emit watermark = 5 Tuple2.of(2, 3), // late element Tuple2.of(1, 2), // late element Tuple2.of(1, 13), Tuple2.of(1, 11), Tuple2.of(2, 14), // emit watermark = 15 Tuple2.of(1, 11) // late element ) .assignTimestampsAndWatermarks(watermarkStrategy) .map(element -> element.f0); OutputTag<Integer> lateElements = 
new OutputTag<>("late_elements", BasicTypeInfo.INT_TYPE_INFO); SingleOutputStreamOperator<Tuple3<Long, Integer, Integer>> sums = elements1 .connect(elements2) .keyBy(element -> element, element -> element) .process( new KeyedCoProcessFunction< Integer, Integer, Integer, Tuple3<Long, Integer, Integer>>() { private MapState<Long, Integer> countState; private ValueState<Long> previousTimestampState; @Override public void open(OpenContext openContext) { countState = getRuntimeContext() .getMapState( new MapStateDescriptor<>( "sum", BasicTypeInfo .LONG_TYPE_INFO, BasicTypeInfo .INT_TYPE_INFO)); previousTimestampState = getRuntimeContext() .getState( new ValueStateDescriptor<>( "previousTimestamp", BasicTypeInfo .LONG_TYPE_INFO)); } @Override public void processElement1( Integer value, Context ctx, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception { processElement(value, ctx); } @Override public void processElement2( Integer value, Context ctx, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception { processElement(value, ctx); } private void processElement(Integer value, Context ctx) throws Exception { Long elementTimestamp = ctx.timestamp(); long nextTen = ((elementTimestamp + 10) / 10) * 10; ctx.timerService().registerEventTimeTimer(nextTen); if (elementTimestamp < ctx.timerService().currentWatermark()) { ctx.output(lateElements, value); } else { Long previousTimestamp = Optional.ofNullable( previousTimestampState.value()) .orElse(0L); assertThat( elementTimestamp, greaterThanOrEqualTo(previousTimestamp)); previousTimestampState.update(elementTimestamp); Integer currentCount = Optional.ofNullable(countState.get(nextTen)) .orElse(0); countState.put(nextTen, currentCount + 1); } } @Override public void onTimer( long timestamp, OnTimerContext ctx, Collector<Tuple3<Long, Integer, Integer>> out) throws Exception { out.collect( Tuple3.of( timestamp, ctx.getCurrentKey(), countState.get(timestamp))); countState.remove(timestamp); // this would go in 
infinite loop if we did not quiesce the // timer service. ctx.timerService().registerEventTimeTimer(timestamp + 1); } }); DataStream<Integer> lateStream = sums.getSideOutput(lateElements); List<Integer> lateRecordsCollected = CollectionUtil.iteratorToList(lateStream.executeAndCollect()); List<Tuple3<Long, Integer, Integer>> sumsCollected = CollectionUtil.iteratorToList(sums.executeAndCollect()); assertTrue(lateRecordsCollected.isEmpty()); assertThat( sumsCollected, equalTo( Arrays.asList( Tuple3.of(10L, 1, 8), Tuple3.of(20L, 1, 6), Tuple3.of(10L, 2, 4), Tuple3.of(20L, 2, 2)))); } private static final WatermarkGenerator<Tuple2<Integer, Integer>> GENERATE_WATERMARK_AFTER_4_14_TIMESTAMP = new WatermarkGenerator<Tuple2<Integer, Integer>>() { @Override public void onEvent( Tuple2<Integer, Integer> event, long eventTimestamp, WatermarkOutput output) { if (eventTimestamp == 4) { output.emitWatermark(new Watermark(5)); } else if (eventTimestamp == 14) { output.emitWatermark(new Watermark(15)); } } @Override public void onPeriodicEmit(WatermarkOutput output) {} }; private static class AssertingOperator extends AbstractStreamOperator<Long> implements OneInputStreamOperator<Tuple2<Integer, byte[]>, Long>, BoundedOneInput { private final Set<Integer> seenKeys = new HashSet<>(); private long seenRecords = 0; private Integer currentKey = null; @Override public void processElement(StreamRecord<Tuple2<Integer, byte[]>> element) throws Exception { this.seenRecords++; Integer incomingKey = element.getValue().f0; if (!Objects.equals(incomingKey, currentKey)) { if (!seenKeys.add(incomingKey)) { Assert.fail("Received an out of order key: " + incomingKey); } this.currentKey = incomingKey; } } @Override public void endInput() { output.collect(new StreamRecord<>(seenRecords)); } } private static class AssertingTwoInputOperator extends AbstractStreamOperator<Long> implements TwoInputStreamOperator< Tuple2<Integer, byte[]>, Tuple2<Integer, byte[]>, Long>, BoundedMultiInput { private final 
Set<Integer> seenKeys = new HashSet<>(); private long seenRecords = 0; private Integer currentKey = null; private boolean input1Finished = false; private boolean input2Finished = false; @Override public void processElement1(StreamRecord<Tuple2<Integer, byte[]>> element) { processElement(element); } @Override public void processElement2(StreamRecord<Tuple2<Integer, byte[]>> element) { processElement(element); } private void processElement(StreamRecord<Tuple2<Integer, byte[]>> element) { this.seenRecords++; Integer incomingKey = element.getValue().f0; if (!Objects.equals(incomingKey, currentKey)) { if (!seenKeys.add(incomingKey)) { Assert.fail("Received an out of order key: " + incomingKey); } this.currentKey = incomingKey; } } @Override public void endInput(int inputId) { if (inputId == 1) { input1Finished = true; } if (inputId == 2) { input2Finished = true; } if (input1Finished && input2Finished) { output.collect(new StreamRecord<>(seenRecords)); } } } private static class AssertingThreeInputOperator extends AbstractStreamOperatorV2<Long> implements MultipleInputStreamOperator<Long>, BoundedMultiInput { private final Set<Integer> seenKeys = new HashSet<>(); private long seenRecords = 0; private Integer currentKey = null; private boolean input1Finished = false; private boolean input2Finished = false; private boolean input3Finished = false; public AssertingThreeInputOperator( StreamOperatorParameters<Long> parameters, int numberOfInputs) { super(parameters, 3); assert numberOfInputs == 3; } private void processElement(Tuple2<Integer, byte[]> element) { this.seenRecords++; Integer incomingKey = element.f0; if (!Objects.equals(incomingKey, currentKey)) { if (!seenKeys.add(incomingKey)) { Assert.fail("Received an out of order key: " + incomingKey); } this.currentKey = incomingKey; } } @Override public void endInput(int inputId) { if (inputId == 1) { input1Finished = true; } if (inputId == 2) { input2Finished = true; } if (inputId == 3) { input3Finished = true; } if 
(input1Finished && input2Finished && input3Finished) { output.collect(new StreamRecord<>(seenRecords)); } } @Override public List<Input> getInputs() { return Arrays.asList( new SingleInput(this::processElement), new SingleInput(this::processElement), new SingleInput(this::processElement)); } } private static class AssertingThreeInputOperatorFactory implements StreamOperatorFactory<Long> { @Override @SuppressWarnings("unchecked") public <T extends StreamOperator<Long>> T createStreamOperator( StreamOperatorParameters<Long> parameters) { return (T) new AssertingThreeInputOperator(parameters, 3); } @Override public void setChainingStrategy(ChainingStrategy strategy) {} @Override public ChainingStrategy getChainingStrategy() { return ChainingStrategy.NEVER; } @Override public Class<? extends StreamOperator> getStreamOperatorClass(ClassLoader classLoader) { return AssertingThreeInputOperator.class; } } private static class SingleInput implements Input<Tuple2<Integer, byte[]>> { private final Consumer<Tuple2<Integer, byte[]>> recordConsumer; private SingleInput(Consumer<Tuple2<Integer, byte[]>> recordConsumer) { this.recordConsumer = recordConsumer; } @Override public void processElement(StreamRecord<Tuple2<Integer, byte[]>> element) throws Exception { recordConsumer.accept(element.getValue()); } @Override public void processWatermark(org.apache.flink.streaming.api.watermark.Watermark mark) {} @Override public void processLatencyMarker(LatencyMarker latencyMarker) {} @Override public void setKeyContextElement(StreamRecord<Tuple2<Integer, byte[]>> record) {} @Override public void processWatermarkStatus(WatermarkStatus watermarkStatus) throws Exception {} } private static class InputGenerator extends SplittableIterator<Tuple2<Integer, byte[]>> { private final long numberOfRecords; private long generatedRecords; private final Random rnd = new Random(); private final byte[] bytes = new byte[500]; private InputGenerator(long numberOfRecords) { this.numberOfRecords = 
numberOfRecords; rnd.nextBytes(bytes); } @Override @SuppressWarnings("unchecked") public Iterator<Tuple2<Integer, byte[]>>[] split(int numPartitions) { long numberOfRecordsPerPartition = numberOfRecords / numPartitions; long remainder = numberOfRecords % numPartitions; Iterator<Tuple2<Integer, byte[]>>[] iterators = new Iterator[numPartitions]; for (int i = 0; i < numPartitions - 1; i++) { iterators[i] = new InputGenerator(numberOfRecordsPerPartition); } iterators[numPartitions - 1] = new InputGenerator(numberOfRecordsPerPartition + remainder); return iterators; } @Override public int getMaximumNumberOfSplits() { return (int) Math.min(numberOfRecords, Integer.MAX_VALUE); } @Override public boolean hasNext() { return generatedRecords < numberOfRecords; } @Override public Tuple2<Integer, byte[]> next() { if (hasNext()) { generatedRecords++; return Tuple2.of(rnd.nextInt(10), bytes); } return null; } } }